@aztec/archiver 0.0.1-commit.d3ec352c → 0.0.1-commit.fcb71a6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +84 -70
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +439 -228
- package/dest/archiver/archiver_store.d.ts +95 -43
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +1847 -366
- package/dest/archiver/config.d.ts +5 -4
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +10 -3
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/instrumentation.d.ts +3 -1
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/instrumentation.js +11 -0
- package/dest/archiver/kv_archiver_store/block_store.d.ts +50 -18
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +320 -84
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +40 -51
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +65 -48
- package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +148 -84
- package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.js +15 -14
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
- package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/archiver/l1/calldata_retriever.js +471 -0
- package/dest/archiver/l1/data_retrieval.d.ts +90 -0
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
- package/dest/archiver/{data_retrieval.js → l1/data_retrieval.js} +50 -106
- package/dest/archiver/l1/debug_tx.d.ts +19 -0
- package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
- package/dest/archiver/l1/debug_tx.js +73 -0
- package/dest/archiver/l1/spire_proposer.d.ts +70 -0
- package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
- package/dest/archiver/l1/spire_proposer.js +157 -0
- package/dest/archiver/l1/trace_tx.d.ts +97 -0
- package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
- package/dest/archiver/l1/trace_tx.js +91 -0
- package/dest/archiver/l1/types.d.ts +12 -0
- package/dest/archiver/l1/types.d.ts.map +1 -0
- package/dest/archiver/l1/types.js +3 -0
- package/dest/archiver/l1/validate_trace.d.ts +29 -0
- package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
- package/dest/archiver/l1/validate_trace.js +150 -0
- package/dest/archiver/structs/inbox_message.d.ts +4 -4
- package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
- package/dest/archiver/structs/inbox_message.js +6 -6
- package/dest/archiver/structs/published.d.ts +1 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/factory.d.ts +1 -1
- package/dest/factory.js +1 -1
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/test/mock_archiver.d.ts +4 -5
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +5 -9
- package/dest/test/mock_l1_to_l2_message_source.d.ts +5 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +7 -11
- package/dest/test/mock_l2_block_source.d.ts +11 -4
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +18 -17
- package/dest/test/mock_structs.d.ts +3 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +9 -9
- package/package.json +15 -14
- package/src/archiver/archiver.ts +567 -290
- package/src/archiver/archiver_store.ts +104 -42
- package/src/archiver/archiver_store_test_suite.ts +1895 -347
- package/src/archiver/config.ts +15 -10
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/instrumentation.ts +14 -0
- package/src/archiver/kv_archiver_store/block_store.ts +435 -95
- package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
- package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +81 -66
- package/src/archiver/kv_archiver_store/log_store.ts +208 -99
- package/src/archiver/kv_archiver_store/message_store.ts +21 -18
- package/src/archiver/l1/README.md +98 -0
- package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
- package/src/archiver/l1/calldata_retriever.ts +641 -0
- package/src/archiver/{data_retrieval.ts → l1/data_retrieval.ts} +96 -161
- package/src/archiver/l1/debug_tx.ts +99 -0
- package/src/archiver/l1/spire_proposer.ts +160 -0
- package/src/archiver/l1/trace_tx.ts +128 -0
- package/src/archiver/l1/types.ts +13 -0
- package/src/archiver/l1/validate_trace.ts +211 -0
- package/src/archiver/structs/inbox_message.ts +7 -8
- package/src/archiver/structs/published.ts +0 -1
- package/src/factory.ts +1 -1
- package/src/index.ts +1 -1
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/mock_archiver.ts +6 -11
- package/src/test/mock_l1_to_l2_message_source.ts +6 -11
- package/src/test/mock_l2_block_source.ts +22 -18
- package/src/test/mock_structs.ts +10 -10
- package/dest/archiver/data_retrieval.d.ts +0 -80
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
|
@@ -1,22 +1,25 @@
|
|
|
1
|
-
import { INITIAL_L2_BLOCK_NUM, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PRIVATE_LOG_SIZE_IN_FIELDS } from '@aztec/constants';
|
|
1
|
+
import { INITIAL_CHECKPOINT_NUMBER, INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PRIVATE_LOG_SIZE_IN_FIELDS } from '@aztec/constants';
|
|
2
2
|
import { makeTuple } from '@aztec/foundation/array';
|
|
3
|
-
import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
|
|
3
|
+
import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
|
|
4
4
|
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
5
5
|
import { times, timesParallel } from '@aztec/foundation/collection';
|
|
6
|
-
import { randomInt } from '@aztec/foundation/crypto';
|
|
7
|
-
import { Fr } from '@aztec/foundation/
|
|
6
|
+
import { randomInt } from '@aztec/foundation/crypto/random';
|
|
7
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
8
8
|
import { toArray } from '@aztec/foundation/iterable';
|
|
9
9
|
import { sleep } from '@aztec/foundation/sleep';
|
|
10
10
|
import { AztecAddress } from '@aztec/stdlib/aztec-address';
|
|
11
|
-
import { CommitteeAttestation, EthAddress,
|
|
11
|
+
import { CommitteeAttestation, EthAddress, L2BlockHash, L2BlockNew, randomBlockInfo } from '@aztec/stdlib/block';
|
|
12
|
+
import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
|
|
12
13
|
import { SerializableContractInstance, computePublicBytecodeCommitment } from '@aztec/stdlib/contract';
|
|
13
|
-
import { LogId, PrivateLog, PublicLog } from '@aztec/stdlib/logs';
|
|
14
|
+
import { ContractClassLog, LogId, PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs';
|
|
14
15
|
import { InboxLeaf } from '@aztec/stdlib/messaging';
|
|
16
|
+
import { CheckpointHeader } from '@aztec/stdlib/rollup';
|
|
15
17
|
import { makeContractClassPublic, makeExecutablePrivateFunctionWithMembershipProof, makeUtilityFunctionWithMembershipProof } from '@aztec/stdlib/testing';
|
|
16
18
|
import '@aztec/stdlib/testing/jest';
|
|
17
|
-
import {
|
|
19
|
+
import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
|
|
20
|
+
import { PartialStateReference, StateReference, TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
18
21
|
import { makeInboxMessage, makeInboxMessages } from '../test/mock_structs.js';
|
|
19
|
-
import { BlockNumberNotSequentialError, InitialBlockNumberNotSequentialError } from './errors.js';
|
|
22
|
+
import { BlockArchiveNotConsistentError, BlockIndexNotSequentialError, BlockNumberNotSequentialError, CheckpointNumberNotConsistentError, CheckpointNumberNotSequentialError, InitialBlockNumberNotSequentialError, InitialCheckpointNumberNotSequentialError } from './errors.js';
|
|
20
23
|
import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
21
24
|
/**
|
|
22
25
|
* @param testName - The name of the test suite.
|
|
@@ -24,100 +27,256 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
24
27
|
*/ export function describeArchiverDataStore(testName, getStore) {
|
|
25
28
|
describe(testName, ()=>{
|
|
26
29
|
let store;
|
|
27
|
-
let
|
|
28
|
-
const
|
|
30
|
+
let publishedCheckpoints;
|
|
31
|
+
const blockNumberTests = [
|
|
29
32
|
[
|
|
30
33
|
1,
|
|
31
|
-
|
|
32
|
-
()=>blocks.slice(0, 1)
|
|
33
|
-
],
|
|
34
|
-
[
|
|
35
|
-
10,
|
|
36
|
-
1,
|
|
37
|
-
()=>blocks.slice(9, 10)
|
|
34
|
+
()=>publishedCheckpoints[0].checkpoint.blocks[0]
|
|
38
35
|
],
|
|
39
36
|
[
|
|
40
|
-
1,
|
|
41
37
|
10,
|
|
42
|
-
()=>blocks
|
|
38
|
+
()=>publishedCheckpoints[9].checkpoint.blocks[0]
|
|
43
39
|
],
|
|
44
40
|
[
|
|
45
|
-
2,
|
|
46
41
|
5,
|
|
47
|
-
()=>blocks
|
|
48
|
-
],
|
|
49
|
-
[
|
|
50
|
-
5,
|
|
51
|
-
2,
|
|
52
|
-
()=>blocks.slice(4, 6)
|
|
42
|
+
()=>publishedCheckpoints[4].checkpoint.blocks[0]
|
|
53
43
|
]
|
|
54
44
|
];
|
|
55
45
|
const makeBlockHash = (blockNumber)=>`0x${blockNumber.toString(16).padStart(64, '0')}`;
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
const
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
}
|
|
46
|
+
// Create a state reference with properly calculated noteHashTree.nextAvailableLeafIndex
|
|
47
|
+
// This is needed because the log store calculates dataStartIndexForBlock as:
|
|
48
|
+
// noteHashTree.nextAvailableLeafIndex - txEffects.length * MAX_NOTE_HASHES_PER_TX
|
|
49
|
+
// If nextAvailableLeafIndex is too small (random values 0-1000), this becomes negative
|
|
50
|
+
const makeStateForBlock = (blockNumber, txsPerBlock)=>{
|
|
51
|
+
// Ensure nextAvailableLeafIndex is large enough for all blocks up to this point
|
|
52
|
+
const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX;
|
|
53
|
+
return new StateReference(AppendOnlyTreeSnapshot.random(), new PartialStateReference(new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex), AppendOnlyTreeSnapshot.random(), AppendOnlyTreeSnapshot.random()));
|
|
54
|
+
};
|
|
55
|
+
const makePublishedCheckpoint = (checkpoint, l1BlockNumber)=>{
|
|
56
|
+
return new PublishedCheckpoint(checkpoint, new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)), times(3, CommitteeAttestation.random));
|
|
57
|
+
};
|
|
58
|
+
const expectCheckpointedBlockEquals = (actual, expectedBlock, expectedCheckpoint)=>{
|
|
59
|
+
expect(actual.l1).toEqual(expectedCheckpoint.l1);
|
|
60
|
+
expect(actual.block.header.equals(expectedBlock.header)).toBe(true);
|
|
61
|
+
expect(actual.checkpointNumber).toEqual(expectedCheckpoint.checkpoint.number);
|
|
62
|
+
expect(actual.attestations.every((a, i)=>a.equals(expectedCheckpoint.attestations[i]))).toBe(true);
|
|
74
63
|
};
|
|
75
64
|
beforeEach(async ()=>{
|
|
76
65
|
store = await getStore();
|
|
77
|
-
|
|
66
|
+
// Create checkpoints sequentially to ensure archive roots are chained properly.
|
|
67
|
+
// Each block's header.lastArchive must equal the previous block's archive.
|
|
68
|
+
publishedCheckpoints = [];
|
|
69
|
+
const txsPerBlock = 4;
|
|
70
|
+
for(let i = 0; i < 10; i++){
|
|
71
|
+
const blockNumber = i + 1;
|
|
72
|
+
const previousArchive = i > 0 ? publishedCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
73
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
|
|
74
|
+
numBlocks: 1,
|
|
75
|
+
startBlockNumber: blockNumber,
|
|
76
|
+
previousArchive,
|
|
77
|
+
txsPerBlock,
|
|
78
|
+
state: makeStateForBlock(blockNumber, txsPerBlock),
|
|
79
|
+
// Ensure each tx has public logs for getPublicLogs tests
|
|
80
|
+
txOptions: {
|
|
81
|
+
numPublicCallsPerTx: 2,
|
|
82
|
+
numPublicLogsPerCall: 2
|
|
83
|
+
}
|
|
84
|
+
});
|
|
85
|
+
publishedCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
|
|
86
|
+
}
|
|
78
87
|
});
|
|
79
|
-
describe('
|
|
80
|
-
it('returns success when adding
|
|
81
|
-
await expect(store.
|
|
88
|
+
describe('addCheckpoints', ()=>{
|
|
89
|
+
it('returns success when adding checkpoints', async ()=>{
|
|
90
|
+
await expect(store.addCheckpoints(publishedCheckpoints)).resolves.toBe(true);
|
|
82
91
|
});
|
|
83
|
-
it('
|
|
84
|
-
await store.
|
|
85
|
-
await expect(store.
|
|
92
|
+
it('throws on duplicate checkpoints', async ()=>{
|
|
93
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
94
|
+
await expect(store.addCheckpoints(publishedCheckpoints)).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
86
95
|
});
|
|
87
96
|
it('throws an error if the previous block does not exist in the store', async ()=>{
|
|
88
|
-
const
|
|
89
|
-
|
|
97
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(2), {
|
|
98
|
+
numBlocks: 1,
|
|
99
|
+
startBlockNumber: 2
|
|
100
|
+
});
|
|
101
|
+
const block = makePublishedCheckpoint(checkpoint, 2);
|
|
102
|
+
await expect(store.addCheckpoints([
|
|
90
103
|
block
|
|
91
|
-
])).rejects.toThrow(
|
|
92
|
-
await expect(store.
|
|
104
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
105
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
93
106
|
});
|
|
94
107
|
it('throws an error if there is a gap in the blocks being added', async ()=>{
|
|
95
|
-
const
|
|
96
|
-
|
|
97
|
-
|
|
108
|
+
const checkpoint1 = await Checkpoint.random(CheckpointNumber(1), {
|
|
109
|
+
numBlocks: 1,
|
|
110
|
+
startBlockNumber: 1
|
|
111
|
+
});
|
|
112
|
+
const checkpoint3 = await Checkpoint.random(CheckpointNumber(3), {
|
|
113
|
+
numBlocks: 1,
|
|
114
|
+
startBlockNumber: 3
|
|
115
|
+
});
|
|
116
|
+
const checkpoints = [
|
|
117
|
+
makePublishedCheckpoint(checkpoint1, 1),
|
|
118
|
+
makePublishedCheckpoint(checkpoint3, 3)
|
|
98
119
|
];
|
|
99
|
-
await expect(store.
|
|
100
|
-
await expect(store.
|
|
120
|
+
await expect(store.addCheckpoints(checkpoints)).rejects.toThrow(CheckpointNumberNotSequentialError);
|
|
121
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
122
|
+
});
|
|
123
|
+
it('throws an error if blocks within a checkpoint are not sequential', async ()=>{
|
|
124
|
+
// Create a checkpoint with non-sequential block numbers (block 1 and block 3, skipping block 2)
|
|
125
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
126
|
+
checkpointNumber: CheckpointNumber(1)
|
|
127
|
+
});
|
|
128
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
129
|
+
checkpointNumber: CheckpointNumber(1)
|
|
130
|
+
});
|
|
131
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
132
|
+
block1,
|
|
133
|
+
block3
|
|
134
|
+
], CheckpointNumber(1));
|
|
135
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
136
|
+
await expect(store.addCheckpoints([
|
|
137
|
+
publishedCheckpoint
|
|
138
|
+
])).rejects.toThrow(BlockNumberNotSequentialError);
|
|
139
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
140
|
+
});
|
|
141
|
+
it('throws an error if blocks within a checkpoint do not have sequential indexes', async ()=>{
|
|
142
|
+
// Create a checkpoint with non-sequential indexes
|
|
143
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
144
|
+
checkpointNumber: CheckpointNumber(1),
|
|
145
|
+
indexWithinCheckpoint: 0
|
|
146
|
+
});
|
|
147
|
+
const block3 = await L2BlockNew.random(BlockNumber(2), {
|
|
148
|
+
checkpointNumber: CheckpointNumber(1),
|
|
149
|
+
indexWithinCheckpoint: 2
|
|
150
|
+
});
|
|
151
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
152
|
+
block1,
|
|
153
|
+
block3
|
|
154
|
+
], CheckpointNumber(1));
|
|
155
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
156
|
+
await expect(store.addCheckpoints([
|
|
157
|
+
publishedCheckpoint
|
|
158
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
159
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
160
|
+
});
|
|
161
|
+
it('throws an error if blocks within a checkpoint do not start from index 0', async ()=>{
|
|
162
|
+
// Create a checkpoint with non-sequential indexes
|
|
163
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
164
|
+
checkpointNumber: CheckpointNumber(1),
|
|
165
|
+
indexWithinCheckpoint: 1
|
|
166
|
+
});
|
|
167
|
+
const block3 = await L2BlockNew.random(BlockNumber(2), {
|
|
168
|
+
checkpointNumber: CheckpointNumber(1),
|
|
169
|
+
indexWithinCheckpoint: 2
|
|
170
|
+
});
|
|
171
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
172
|
+
block1,
|
|
173
|
+
block3
|
|
174
|
+
], CheckpointNumber(1));
|
|
175
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
176
|
+
await expect(store.addCheckpoints([
|
|
177
|
+
publishedCheckpoint
|
|
178
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
179
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
180
|
+
});
|
|
181
|
+
it('throws an error if block has invalid checkpoint index', async ()=>{
|
|
182
|
+
// Create a block wit an invalid checkpoint index
|
|
183
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
184
|
+
checkpointNumber: CheckpointNumber(1),
|
|
185
|
+
indexWithinCheckpoint: -1
|
|
186
|
+
});
|
|
187
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
188
|
+
block1
|
|
189
|
+
], CheckpointNumber(1));
|
|
190
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
191
|
+
await expect(store.addCheckpoints([
|
|
192
|
+
publishedCheckpoint
|
|
193
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
194
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
195
|
+
});
|
|
196
|
+
it('throws an error if checkpoint has invalid initial number', async ()=>{
|
|
197
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
198
|
+
checkpointNumber: CheckpointNumber(2),
|
|
199
|
+
indexWithinCheckpoint: 0
|
|
200
|
+
});
|
|
201
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
202
|
+
block1
|
|
203
|
+
], CheckpointNumber(2));
|
|
204
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
205
|
+
await expect(store.addCheckpoints([
|
|
206
|
+
publishedCheckpoint
|
|
207
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
208
|
+
});
|
|
209
|
+
it('allows the correct initial checkpoint', async ()=>{
|
|
210
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
211
|
+
checkpointNumber: CheckpointNumber(1),
|
|
212
|
+
indexWithinCheckpoint: 0
|
|
213
|
+
});
|
|
214
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
215
|
+
block1
|
|
216
|
+
], CheckpointNumber(1));
|
|
217
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
218
|
+
await expect(store.addCheckpoints([
|
|
219
|
+
publishedCheckpoint
|
|
220
|
+
])).resolves.toBe(true);
|
|
221
|
+
});
|
|
222
|
+
it('throws on duplicate initial checkpoint', async ()=>{
|
|
223
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
224
|
+
checkpointNumber: CheckpointNumber(1),
|
|
225
|
+
indexWithinCheckpoint: 0
|
|
226
|
+
});
|
|
227
|
+
const block2 = await L2BlockNew.random(BlockNumber(1), {
|
|
228
|
+
checkpointNumber: CheckpointNumber(1),
|
|
229
|
+
indexWithinCheckpoint: 0
|
|
230
|
+
});
|
|
231
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
232
|
+
block1
|
|
233
|
+
], CheckpointNumber(1));
|
|
234
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
235
|
+
const checkpoint2 = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
236
|
+
block2
|
|
237
|
+
], CheckpointNumber(1));
|
|
238
|
+
const publishedCheckpoint2 = makePublishedCheckpoint(checkpoint2, 10);
|
|
239
|
+
await expect(store.addCheckpoints([
|
|
240
|
+
publishedCheckpoint
|
|
241
|
+
])).resolves.toBe(true);
|
|
242
|
+
await expect(store.addCheckpoints([
|
|
243
|
+
publishedCheckpoint2
|
|
244
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
101
245
|
});
|
|
102
246
|
});
|
|
103
|
-
describe('
|
|
104
|
-
it('unwinding
|
|
105
|
-
await store.
|
|
106
|
-
const
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
await store.
|
|
111
|
-
expect(
|
|
112
|
-
expect(
|
|
247
|
+
describe('unwindcheckpoints', ()=>{
|
|
248
|
+
it('unwinding checkpoints will remove checkpoints from the chain', async ()=>{
|
|
249
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
250
|
+
const checkpointNumber = await store.getSynchedCheckpointNumber();
|
|
251
|
+
const lastCheckpoint = publishedCheckpoints.at(-1);
|
|
252
|
+
const lastBlockNumber = lastCheckpoint.checkpoint.blocks[0].number;
|
|
253
|
+
// Verify block exists before unwinding
|
|
254
|
+
const retrievedBlock = await store.getCheckpointedBlock(lastBlockNumber);
|
|
255
|
+
expect(retrievedBlock).toBeDefined();
|
|
256
|
+
expect(retrievedBlock.block.header.equals(lastCheckpoint.checkpoint.blocks[0].header)).toBe(true);
|
|
257
|
+
expect(retrievedBlock.checkpointNumber).toEqual(checkpointNumber);
|
|
258
|
+
await store.unwindCheckpoints(checkpointNumber, 1);
|
|
259
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(checkpointNumber - 1);
|
|
260
|
+
await expect(store.getCheckpointedBlock(lastBlockNumber)).resolves.toBeUndefined();
|
|
113
261
|
});
|
|
114
262
|
it('can unwind multiple empty blocks', async ()=>{
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
263
|
+
// Create checkpoints sequentially to chain archive roots
|
|
264
|
+
const emptyCheckpoints = [];
|
|
265
|
+
for(let i = 0; i < 10; i++){
|
|
266
|
+
const previousArchive = i > 0 ? emptyCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
267
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
|
|
268
|
+
numBlocks: 1,
|
|
269
|
+
startBlockNumber: i + 1,
|
|
270
|
+
txsPerBlock: 0,
|
|
271
|
+
previousArchive
|
|
272
|
+
});
|
|
273
|
+
emptyCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
|
|
274
|
+
}
|
|
275
|
+
await store.addCheckpoints(emptyCheckpoints);
|
|
276
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(10);
|
|
277
|
+
await store.unwindCheckpoints(CheckpointNumber(10), 3);
|
|
278
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(7);
|
|
279
|
+
expect((await store.getRangeOfCheckpoints(CheckpointNumber(1), 10)).map((b)=>b.checkpointNumber)).toEqual([
|
|
121
280
|
1,
|
|
122
281
|
2,
|
|
123
282
|
3,
|
|
@@ -127,113 +286,1308 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
127
286
|
7
|
|
128
287
|
]);
|
|
129
288
|
});
|
|
130
|
-
it('refuses to unwind
|
|
131
|
-
await store.
|
|
132
|
-
await expect(store.
|
|
289
|
+
it('refuses to unwind checkpoints if the tip is not the last checkpoint', async ()=>{
|
|
290
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
291
|
+
await expect(store.unwindCheckpoints(CheckpointNumber(5), 1)).rejects.toThrow(/can only unwind checkpoints from the tip/i);
|
|
133
292
|
});
|
|
134
293
|
it('unwound blocks and headers cannot be retrieved by hash or archive', async ()=>{
|
|
135
|
-
await store.
|
|
136
|
-
const
|
|
137
|
-
const
|
|
138
|
-
const
|
|
294
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
295
|
+
const lastCheckpoint = publishedCheckpoints[publishedCheckpoints.length - 1];
|
|
296
|
+
const lastBlock = lastCheckpoint.checkpoint.blocks[0];
|
|
297
|
+
const blockHash = await lastBlock.header.hash();
|
|
298
|
+
const archive = lastBlock.archive.root;
|
|
139
299
|
// Verify block and header exist before unwinding
|
|
140
|
-
|
|
141
|
-
expect(
|
|
142
|
-
expect(
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
300
|
+
const retrievedByHash = await store.getCheckpointedBlockByHash(blockHash);
|
|
301
|
+
expect(retrievedByHash).toBeDefined();
|
|
302
|
+
expect(retrievedByHash.block.header.equals(lastBlock.header)).toBe(true);
|
|
303
|
+
const retrievedByArchive = await store.getCheckpointedBlockByArchive(archive);
|
|
304
|
+
expect(retrievedByArchive).toBeDefined();
|
|
305
|
+
expect(retrievedByArchive.block.header.equals(lastBlock.header)).toBe(true);
|
|
306
|
+
const headerByHash = await store.getBlockHeaderByHash(blockHash);
|
|
307
|
+
expect(headerByHash).toBeDefined();
|
|
308
|
+
expect(headerByHash.equals(lastBlock.header)).toBe(true);
|
|
309
|
+
const headerByArchive = await store.getBlockHeaderByArchive(archive);
|
|
310
|
+
expect(headerByArchive).toBeDefined();
|
|
311
|
+
expect(headerByArchive.equals(lastBlock.header)).toBe(true);
|
|
312
|
+
// Unwind the checkpoint
|
|
313
|
+
await store.unwindCheckpoints(lastCheckpoint.checkpoint.number, 1);
|
|
146
314
|
// Verify neither block nor header can be retrieved after unwinding
|
|
147
|
-
expect(await store.
|
|
148
|
-
expect(await store.
|
|
315
|
+
expect(await store.getCheckpointedBlockByHash(blockHash)).toBeUndefined();
|
|
316
|
+
expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
|
|
149
317
|
expect(await store.getBlockHeaderByHash(blockHash)).toBeUndefined();
|
|
150
318
|
expect(await store.getBlockHeaderByArchive(archive)).toBeUndefined();
|
|
151
319
|
});
|
|
152
320
|
});
|
|
153
|
-
describe('
|
|
154
|
-
|
|
155
|
-
|
|
321
|
+
describe('multi-block checkpoints', ()=>{
|
|
322
|
+
it('block number increases correctly when adding checkpoints with multiple blocks', async ()=>{
|
|
323
|
+
// Create 3 checkpoints: first with 2 blocks, second with 3 blocks, third with 1 block
|
|
324
|
+
// Total blocks: 6, spanning block numbers 1-6
|
|
325
|
+
// Chain archive roots across checkpoints
|
|
326
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
327
|
+
numBlocks: 2,
|
|
328
|
+
startBlockNumber: 1
|
|
329
|
+
});
|
|
330
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
331
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
332
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
333
|
+
numBlocks: 3,
|
|
334
|
+
startBlockNumber: 3,
|
|
335
|
+
previousArchive: previousArchive1
|
|
336
|
+
});
|
|
337
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
338
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
339
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
340
|
+
numBlocks: 1,
|
|
341
|
+
startBlockNumber: 6,
|
|
342
|
+
previousArchive: previousArchive2
|
|
343
|
+
});
|
|
344
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
345
|
+
await store.addCheckpoints([
|
|
346
|
+
checkpoint1,
|
|
347
|
+
checkpoint2,
|
|
348
|
+
checkpoint3
|
|
349
|
+
]);
|
|
350
|
+
// Checkpoint number should be 3 (the last checkpoint number)
|
|
351
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(3);
|
|
352
|
+
// Block number should be 6 (the last block number across all checkpoints)
|
|
353
|
+
expect(await store.getLatestBlockNumber()).toBe(6);
|
|
354
|
+
});
|
|
355
|
+
it('block number decreases correctly when unwinding checkpoints with multiple blocks', async ()=>{
|
|
356
|
+
// Create 3 checkpoints with varying block counts, chaining archive roots
|
|
357
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
358
|
+
numBlocks: 2,
|
|
359
|
+
startBlockNumber: 1
|
|
360
|
+
});
|
|
361
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
362
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
363
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
364
|
+
numBlocks: 3,
|
|
365
|
+
startBlockNumber: 3,
|
|
366
|
+
previousArchive: previousArchive1
|
|
367
|
+
});
|
|
368
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
369
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
370
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
371
|
+
numBlocks: 2,
|
|
372
|
+
startBlockNumber: 6,
|
|
373
|
+
previousArchive: previousArchive2
|
|
374
|
+
});
|
|
375
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
376
|
+
await store.addCheckpoints([
|
|
377
|
+
checkpoint1,
|
|
378
|
+
checkpoint2,
|
|
379
|
+
checkpoint3
|
|
380
|
+
]);
|
|
381
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(3);
|
|
382
|
+
expect(await store.getLatestBlockNumber()).toBe(7);
|
|
383
|
+
// Unwind the last checkpoint (which has 2 blocks)
|
|
384
|
+
await store.unwindCheckpoints(CheckpointNumber(3), 1);
|
|
385
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(2);
|
|
386
|
+
expect(await store.getLatestBlockNumber()).toBe(5);
|
|
387
|
+
// Unwind another checkpoint (which has 3 blocks)
|
|
388
|
+
await store.unwindCheckpoints(CheckpointNumber(2), 1);
|
|
389
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(1);
|
|
390
|
+
expect(await store.getLatestBlockNumber()).toBe(2);
|
|
391
|
+
});
|
|
392
|
+
it('unwinding multiple checkpoints with multiple blocks in one go', async ()=>{
    // Four chained checkpoints: blocks 1-2, 3-5, 6-7 and 8-10 (10 blocks total).
    const shapes = [
        { numBlocks: 2, startBlockNumber: 1 },
        { numBlocks: 3, startBlockNumber: 3 },
        { numBlocks: 2, startBlockNumber: 6 },
        { numBlocks: 3, startBlockNumber: 8 },
    ];
    const published = [];
    let previousArchive;
    for (let i = 0; i < shapes.length; i++) {
        // Chain each checkpoint's previousArchive to the last block of the one before it.
        const opts = previousArchive === undefined ? shapes[i] : { ...shapes[i], previousArchive };
        const cp = await Checkpoint.random(CheckpointNumber(i + 1), opts);
        published.push(makePublishedCheckpoint(cp, 10 + i));
        previousArchive = cp.blocks.at(-1).archive;
    }
    await store.addCheckpoints(published);
    expect(await store.getSynchedCheckpointNumber()).toBe(4);
    expect(await store.getLatestBlockNumber()).toBe(10);
    // Unwind checkpoints 3 and 4 in a single call (5 blocks between them).
    await store.unwindCheckpoints(CheckpointNumber(4), 2);
    expect(await store.getSynchedCheckpointNumber()).toBe(2);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 1-5 (checkpoints 1-2) survive; blocks 6-10 (checkpoints 3-4) are gone.
    for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
        const found = await store.getCheckpointedBlock(blockNumber);
        if (blockNumber <= 5) {
            expect(found).toBeDefined();
        } else {
            expect(found).toBeUndefined();
        }
    }
    // Unwind the remaining two checkpoints in one go as well.
    await store.unwindCheckpoints(CheckpointNumber(2), 2);
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(0);
    // No block should remain retrievable.
    for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
});
|
|
157
|
-
it
|
|
158
|
-
|
|
454
|
+
it('getCheckpointedBlock returns correct checkpoint info for blocks within multi-block checkpoints', async ()=>{
    // Two chained checkpoints: checkpoint 1 covers blocks 1-3, checkpoint 2 covers blocks 4-5.
    const cp1 = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 });
    const published1 = makePublishedCheckpoint(cp1, 10);
    const cp2 = await Checkpoint.random(CheckpointNumber(2), {
        numBlocks: 2,
        startBlockNumber: 4,
        previousArchive: cp1.blocks.at(-1).archive,
    });
    const published2 = makePublishedCheckpoint(cp2, 11);
    await store.addCheckpoints([published1, published2]);
    // Each block must report its owning checkpoint, its own number, and the checkpoint's L1 data.
    const verify = async (blockNumber, checkpointNumber, published)=>{
        const info = await store.getCheckpointedBlock(blockNumber);
        expect(info).toBeDefined();
        expect(info.checkpointNumber).toBe(checkpointNumber);
        expect(info.block.number).toBe(blockNumber);
        expect(info.l1).toEqual(published.l1);
        expect(info.attestations.every((a, j)=>a.equals(published.attestations[j]))).toBe(true);
    };
    for (const blockNumber of [1, 2, 3]) {
        await verify(blockNumber, 1, published1);
    }
    for (const blockNumber of [4, 5]) {
        await verify(blockNumber, 2, published2);
    }
});
|
|
160
|
-
it('returns
|
|
161
|
-
await
|
|
495
|
+
it('getCheckpointedBlockByHash returns correct checkpoint info for blocks within multi-block checkpoints', async ()=>{
    // One checkpoint spanning blocks 1-3.
    const published = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 3,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published]);
    // Every block must be retrievable through its header hash.
    for (const [i, block] of published.checkpoint.blocks.entries()) {
        const info = await store.getCheckpointedBlockByHash(await block.header.hash());
        expect(info).toBeDefined();
        expect(info.checkpointNumber).toBe(1);
        expect(info.block.number).toBe(i + 1);
        expect(info.l1).toEqual(published.l1);
    }
});
|
|
163
|
-
it('
|
|
164
|
-
await
|
|
514
|
+
it('getCheckpointedBlockByArchive returns correct checkpoint info for blocks within multi-block checkpoints', async ()=>{
    // One checkpoint spanning blocks 1-3.
    const published = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 3,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published]);
    // Every block must be retrievable through its archive root.
    for (const [i, block] of published.checkpoint.blocks.entries()) {
        const info = await store.getCheckpointedBlockByArchive(block.archive.root);
        expect(info).toBeDefined();
        expect(info.checkpointNumber).toBe(1);
        expect(info.block.number).toBe(i + 1);
        expect(info.l1).toEqual(published.l1);
    }
});
|
|
166
|
-
it('
|
|
167
|
-
await
|
|
533
|
+
it('unwinding a multi-block checkpoint removes all its blocks', async ()=>{
    // One checkpoint spanning blocks 1-3.
    const published = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 3,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published]);
    // All three blocks are present before the unwind...
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
    }
    await store.unwindCheckpoints(CheckpointNumber(1), 1);
    // ...and none of them afterwards.
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(0);
});
|
|
169
|
-
|
|
554
|
+
});
|
|
555
|
+
describe('uncheckpointed blocks', ()=>{
|
|
556
|
+
it('can add blocks independently before a checkpoint arrives', async ()=>{
    // Start from a synced checkpoint 1 covering blocks 1-3.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 3,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // Blocks 4-6 arrive ahead of checkpoint 2, each chained off the previous archive.
    let lastArchive = published1.checkpoint.blocks.at(-1).archive;
    const pending = [];
    for (let idx = 0; idx < 3; idx++) {
        const block = await L2BlockNew.random(BlockNumber(4 + idx), {
            checkpointNumber: CheckpointNumber(2),
            indexWithinCheckpoint: idx,
            lastArchive,
        });
        pending.push(block);
        lastArchive = block.archive;
    }
    await store.addBlocks(pending);
    // No new checkpoint was synced, but the block head moved forward.
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(6);
});
|
|
595
|
+
it('getBlock retrieves uncheckpointed blocks', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Blocks 3-4 for the upcoming checkpoint 2, chained off checkpoint 1's tip.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await store.addBlocks([b3, b4]);
    // getBlock serves checkpointed and uncheckpointed blocks alike.
    expect((await store.getBlock(1))?.number).toBe(1);
    expect((await store.getBlock(2))?.number).toBe(2);
    expect((await store.getBlock(3))?.equals(b3)).toBe(true);
    expect((await store.getBlock(4))?.equals(b4)).toBe(true);
    expect(await store.getBlock(5)).toBeUndefined();
    // A block added in a later call becomes visible too.
    const b5 = await L2BlockNew.random(BlockNumber(5), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 2,
        lastArchive: b4.archive,
    });
    await store.addBlocks([b5]);
    // The uncheckpointed blocks round-trip with their numbers and payloads intact.
    for (const [blockNumber, original] of [[3, b3], [4, b4], [5, b5]]) {
        const retrieved = await store.getBlock(blockNumber);
        expect(retrieved.number).toBe(blockNumber);
        expect(retrieved.equals(original)).toBe(true);
    }
});
|
|
645
|
+
it('getBlockByHash retrieves uncheckpointed blocks', async ()=>{
    // Two chained blocks for the very first checkpoint, with no checkpoint synced at all.
    const b1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const b2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: b1.archive,
    });
    await store.addBlocks([b1, b2]);
    // Each block is reachable through its header hash.
    for (const block of [b1, b2]) {
        const retrieved = await store.getBlockByHash(await block.header.hash());
        expect(retrieved.equals(block)).toBe(true);
    }
});
|
|
668
|
+
it('getBlockByArchive retrieves uncheckpointed blocks', async ()=>{
    // Two chained blocks for the very first checkpoint, with no checkpoint synced at all.
    const b1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const b2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: b1.archive,
    });
    await store.addBlocks([b1, b2]);
    // Each block is reachable through its archive root.
    for (const block of [b1, b2]) {
        const retrieved = await store.getBlockByArchive(block.archive.root);
        expect(retrieved.equals(block)).toBe(true);
    }
});
|
|
691
|
+
it('getCheckpointedBlock returns undefined for uncheckpointed blocks', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Blocks 3-4 belong to the not-yet-synced checkpoint 2, chained off checkpoint 1's tip.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await store.addBlocks([b3, b4]);
    // Checkpoint-scoped lookups succeed only for blocks covered by a checkpoint...
    expect((await store.getCheckpointedBlock(1))?.block.number).toBe(1);
    expect((await store.getCheckpointedBlock(2))?.block.number).toBe(2);
    expect(await store.getCheckpointedBlock(3)).toBeUndefined();
    expect(await store.getCheckpointedBlock(4)).toBeUndefined();
    // ...while the plain block lookup still sees everything.
    expect((await store.getBlock(3))?.equals(b3)).toBe(true);
    expect((await store.getBlock(4))?.equals(b4)).toBe(true);
});
|
|
726
|
+
it('getCheckpointedBlockByHash returns undefined for uncheckpointed blocks', async ()=>{
    // A single block with no checkpoint behind it.
    const block = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    await store.addBlocks([block]);
    const hash = await block.header.hash();
    // The checkpoint-scoped lookup misses, while the plain hash lookup hits.
    expect(await store.getCheckpointedBlockByHash(hash)).toBeUndefined();
    expect((await store.getBlockByHash(hash))?.equals(block)).toBe(true);
});
|
|
741
|
+
it('getCheckpointedBlockByArchive returns undefined for uncheckpointed blocks', async ()=>{
    // A single block with no checkpoint behind it.
    const block = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    await store.addBlocks([block]);
    const archive = block.archive.root;
    // The checkpoint-scoped lookup misses, while the plain archive lookup hits.
    expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
    expect((await store.getBlockByArchive(archive))?.equals(block)).toBe(true);
});
|
|
756
|
+
it('checkpoint adopts previously added uncheckpointed blocks', async ()=>{
    // Blocks 1-3 arrive before any checkpoint, chained to one another.
    const blocks = [];
    let lastArchive;
    for (let idx = 0; idx < 3; idx++) {
        // The very first block takes no lastArchive; the rest chain off the previous one.
        const opts = {
            checkpointNumber: CheckpointNumber(1),
            indexWithinCheckpoint: idx,
            ...(lastArchive === undefined ? {} : { lastArchive }),
        };
        const block = await L2BlockNew.random(BlockNumber(idx + 1), opts);
        blocks.push(block);
        lastArchive = block.archive;
    }
    await store.addBlocks(blocks);
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // None of them are checkpointed yet.
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    // Syncing checkpoint 1 (covering blocks 1-3) adopts all three blocks.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 3,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // Every block is now checkpointed under checkpoint 1.
    for (const blockNumber of [1, 2, 3]) {
        const info = await store.getCheckpointedBlock(blockNumber);
        expect(info).toBeDefined();
        expect(info.checkpointNumber).toBe(1);
    }
    // The adopted blocks carry the checkpoint's L1 data.
    expect((await store.getCheckpointedBlock(1)).l1).toEqual(published1.l1);
});
|
|
805
|
+
it('can add more uncheckpointed blocks after a checkpoint and then checkpoint them', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Blocks 3-5 chained off checkpoint 1's tip, awaiting checkpoint 2.
    const tipArchive = published1.checkpoint.blocks.at(-1).archive;
    let lastArchive = tipArchive;
    const pending = [];
    for (let idx = 0; idx < 3; idx++) {
        const block = await L2BlockNew.random(BlockNumber(3 + idx), {
            checkpointNumber: CheckpointNumber(2),
            indexWithinCheckpoint: idx,
            lastArchive,
        });
        pending.push(block);
        lastArchive = block.archive;
    }
    await store.addBlocks(pending);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 3-5 are not checkpointed yet.
    for (const blockNumber of [3, 4, 5]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    // Checkpoint 2 lands, covering blocks 3-5 and chaining from checkpoint 1.
    const published2 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(2), {
        numBlocks: 3,
        startBlockNumber: 3,
        previousArchive: tipArchive,
    }), 11);
    await store.addCheckpoints([published2]);
    expect(await store.getSynchedCheckpointNumber()).toBe(2);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 3-5 are now checkpointed under checkpoint 2.
    for (const blockNumber of [3, 4, 5]) {
        const info = await store.getCheckpointedBlock(blockNumber);
        expect(info).toBeDefined();
        expect(info.checkpointNumber).toBe(2);
    }
    expect((await store.getCheckpointedBlock(3)).l1).toEqual(published2.l1);
});
|
|
865
|
+
it('getBlocks retrieves both checkpointed and uncheckpointed blocks', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Blocks 3-4 for the upcoming checkpoint 2, chained off checkpoint 1's tip.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await store.addBlocks([b3, b4]);
    // A range query spanning both kinds returns all four blocks in order.
    const allBlocks = await store.getBlocks(1, 10);
    expect(allBlocks.length).toBe(4);
    expect(allBlocks.map((b)=>b.number)).toEqual([1, 2, 3, 4]);
});
|
|
177
|
-
|
|
900
|
+
});
|
|
901
|
+
describe('addBlocks validation', ()=>{
|
|
902
|
+
it('throws if blocks have different checkpoint numbers', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Archives chain correctly; only the checkpoint numbers disagree (2 vs 3),
    // so the checkpoint-number consistency check is the one that must fire.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await expect(store.addBlocks([b3, b4])).rejects.toThrow(CheckpointNumberNotConsistentError);
});
|
|
929
|
+
it('throws if checkpoint number is not the current checkpoint', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Blocks claiming checkpoint 3 skip over the expected checkpoint 2.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 0,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 1,
    });
    await expect(store.addBlocks([b3, b4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
});
|
|
952
|
+
it('allows blocks with the same checkpoint number for the current checkpoint', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Both blocks target checkpoint 2 and chain their archives correctly.
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await expect(store.addBlocks([b3, b4])).resolves.toBe(true);
    // Both blocks landed and round-trip intact.
    expect((await store.getBlock(3))?.equals(b3)).toBe(true);
    expect((await store.getBlock(4))?.equals(b4)).toBe(true);
});
|
|
981
|
+
it('allows blocks for the initial checkpoint when store is empty', async ()=>{
    // On an empty store, blocks for checkpoint 1 are accepted directly.
    const b1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const b2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: b1.archive,
    });
    await expect(store.addBlocks([b1, b2])).resolves.toBe(true);
    // Both blocks landed and the block head advanced.
    expect((await store.getBlock(1))?.equals(b1)).toBe(true);
    expect((await store.getBlock(2))?.equals(b2)).toBe(true);
    expect(await store.getLatestBlockNumber()).toBe(2);
});
|
|
1001
|
+
it('throws if initial block is duplicated across calls', async ()=>{
    // Two distinct random blocks that both claim block number 1, index 0 of checkpoint 1.
    const first = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const duplicate = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    // The first insert succeeds; re-adding the same block number must be rejected.
    await expect(store.addBlocks([first])).resolves.toBe(true);
    await expect(store.addBlocks([duplicate])).rejects.toThrow(InitialBlockNumberNotSequentialError);
});
|
|
1018
|
+
it('throws if first block has wrong checkpoint number when store is empty', async ()=>{
    // An empty store must start at checkpoint 1, not 2.
    const b1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
    });
    const b2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
    });
    await expect(store.addBlocks([b1, b2])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
});
|
|
1033
|
+
it('allows adding more blocks to the same checkpoint in separate calls', async ()=>{
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
        numBlocks: 2,
        startBlockNumber: 1,
    }), 10);
    await store.addCheckpoints([published1]);
    // Block 3 opens checkpoint 2, chained off checkpoint 1's tip...
    const b3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    await expect(store.addBlocks([b3])).resolves.toBe(true);
    // ...and block 4 extends the same checkpoint in a follow-up call with the next index.
    const b4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: b3.archive,
    });
    await expect(store.addBlocks([b4])).resolves.toBe(true);
    expect(await store.getLatestBlockNumber()).toBe(4);
});
|
|
1063
|
+
it('throws if adding blocks in separate calls with non-consecutive indexes', async ()=>{
|
|
1064
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1065
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1066
|
+
numBlocks: 2,
|
|
1067
|
+
startBlockNumber: 1
|
|
1068
|
+
}), 10);
|
|
1069
|
+
await store.addCheckpoints([
|
|
1070
|
+
checkpoint1
|
|
1071
|
+
]);
|
|
1072
|
+
// Add block 3 for checkpoint 2, chaining archive roots
|
|
1073
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1074
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1075
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1076
|
+
indexWithinCheckpoint: 0,
|
|
1077
|
+
lastArchive: lastBlockArchive
|
|
1078
|
+
});
|
|
1079
|
+
await expect(store.addBlocks([
|
|
1080
|
+
block3
|
|
1081
|
+
])).resolves.toBe(true);
|
|
1082
|
+
// Add block 4 for the same checkpoint 2 in a separate call but with a missing index
|
|
1083
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1084
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1085
|
+
indexWithinCheckpoint: 2,
|
|
1086
|
+
lastArchive: block3.archive
|
|
1087
|
+
});
|
|
1088
|
+
await expect(store.addBlocks([
|
|
1089
|
+
block4
|
|
1090
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
1091
|
+
expect(await store.getLatestBlockNumber()).toBe(3);
|
|
1092
|
+
});
|
|
1093
|
+
it('throws if second batch of blocks has different checkpoint number than first batch', async ()=>{
|
|
1094
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1095
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1096
|
+
numBlocks: 2,
|
|
1097
|
+
startBlockNumber: 1
|
|
1098
|
+
}), 10);
|
|
1099
|
+
await store.addCheckpoints([
|
|
1100
|
+
checkpoint1
|
|
1101
|
+
]);
|
|
1102
|
+
// Add block 3 for checkpoint 2, chaining archive roots
|
|
1103
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1104
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1105
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1106
|
+
indexWithinCheckpoint: 0,
|
|
1107
|
+
lastArchive: lastBlockArchive
|
|
1108
|
+
});
|
|
178
1109
|
await store.addBlocks([
|
|
179
|
-
|
|
180
|
-
|
|
1110
|
+
block3
|
|
1111
|
+
]);
|
|
1112
|
+
// Try to add block 4 for checkpoint 3 (should fail because current checkpoint is still 2)
|
|
1113
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1114
|
+
checkpointNumber: CheckpointNumber(3),
|
|
1115
|
+
indexWithinCheckpoint: 0,
|
|
1116
|
+
lastArchive: block3.archive
|
|
1117
|
+
});
|
|
1118
|
+
await expect(store.addBlocks([
|
|
1119
|
+
block4
|
|
1120
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
1121
|
+
});
|
|
1122
|
+
it('force option bypasses checkpoint number validation', async ()=>{
|
|
1123
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1124
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1125
|
+
numBlocks: 2,
|
|
1126
|
+
startBlockNumber: 1
|
|
1127
|
+
}), 10);
|
|
1128
|
+
await store.addCheckpoints([
|
|
1129
|
+
checkpoint1
|
|
1130
|
+
]);
|
|
1131
|
+
// Add blocks with different checkpoint numbers using force option, chaining archive roots
|
|
1132
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1133
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1134
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1135
|
+
indexWithinCheckpoint: 0,
|
|
1136
|
+
lastArchive: lastBlockArchive
|
|
1137
|
+
});
|
|
1138
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1139
|
+
checkpointNumber: CheckpointNumber(5),
|
|
1140
|
+
indexWithinCheckpoint: 0,
|
|
1141
|
+
lastArchive: block3.archive
|
|
1142
|
+
});
|
|
1143
|
+
await expect(store.addBlocks([
|
|
1144
|
+
block3,
|
|
1145
|
+
block4
|
|
1146
|
+
], {
|
|
1147
|
+
force: true
|
|
1148
|
+
})).resolves.toBe(true);
|
|
1149
|
+
});
|
|
1150
|
+
it('force option bypasses blockindex number validation', async ()=>{
|
|
1151
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1152
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1153
|
+
numBlocks: 2,
|
|
1154
|
+
startBlockNumber: 1
|
|
1155
|
+
}), 10);
|
|
1156
|
+
await store.addCheckpoints([
|
|
1157
|
+
checkpoint1
|
|
1158
|
+
]);
|
|
1159
|
+
// Add blocks with different checkpoint numbers using force option, chaining archive roots
|
|
1160
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1161
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1162
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1163
|
+
indexWithinCheckpoint: 0,
|
|
1164
|
+
lastArchive: lastBlockArchive
|
|
1165
|
+
});
|
|
1166
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1167
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1168
|
+
indexWithinCheckpoint: 2,
|
|
1169
|
+
lastArchive: block3.archive
|
|
1170
|
+
});
|
|
1171
|
+
await expect(store.addBlocks([
|
|
1172
|
+
block3,
|
|
1173
|
+
block4
|
|
181
1174
|
], {
|
|
182
1175
|
force: true
|
|
1176
|
+
})).resolves.toBe(true);
|
|
1177
|
+
});
|
|
1178
|
+
it('throws if adding blocks with non-consecutive archives', async ()=>{
|
|
1179
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1180
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1181
|
+
numBlocks: 2,
|
|
1182
|
+
startBlockNumber: 1
|
|
1183
|
+
}), 10);
|
|
1184
|
+
await store.addCheckpoints([
|
|
1185
|
+
checkpoint1
|
|
1186
|
+
]);
|
|
1187
|
+
// Add block 3 for checkpoint 2 with incorrect archive
|
|
1188
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1189
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1190
|
+
indexWithinCheckpoint: 0
|
|
183
1191
|
});
|
|
184
|
-
await expect(store.
|
|
1192
|
+
await expect(store.addBlocks([
|
|
1193
|
+
block3
|
|
1194
|
+
])).rejects.toThrow(BlockArchiveNotConsistentError);
|
|
1195
|
+
expect(await store.getLatestBlockNumber()).toBe(2);
|
|
1196
|
+
});
|
|
1197
|
+
it('throws if adding blocks with non-consecutive archives across calls', async ()=>{
|
|
1198
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1199
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1200
|
+
numBlocks: 2,
|
|
1201
|
+
startBlockNumber: 1
|
|
1202
|
+
}), 10);
|
|
1203
|
+
await store.addCheckpoints([
|
|
1204
|
+
checkpoint1
|
|
1205
|
+
]);
|
|
1206
|
+
// Add block 3 for checkpoint 2 with correct archive
|
|
1207
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1208
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1209
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1210
|
+
indexWithinCheckpoint: 0,
|
|
1211
|
+
lastArchive: lastBlockArchive
|
|
1212
|
+
});
|
|
1213
|
+
await expect(store.addBlocks([
|
|
1214
|
+
block3
|
|
1215
|
+
])).resolves.toBe(true);
|
|
1216
|
+
// Add block 4 with incorrect archive (should fail)
|
|
1217
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1218
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1219
|
+
indexWithinCheckpoint: 1,
|
|
1220
|
+
lastArchive: AppendOnlyTreeSnapshot.random()
|
|
1221
|
+
});
|
|
1222
|
+
await expect(store.addBlocks([
|
|
1223
|
+
block4
|
|
1224
|
+
])).rejects.toThrow(BlockArchiveNotConsistentError);
|
|
1225
|
+
expect(await store.getLatestBlockNumber()).toBe(3);
|
|
1226
|
+
});
|
|
1227
|
+
});
|
|
1228
|
+
describe('getBlocksForCheckpoint', ()=>{
|
|
1229
|
+
it('returns blocks for a single-block checkpoint', async ()=>{
|
|
1230
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1231
|
+
numBlocks: 1,
|
|
1232
|
+
startBlockNumber: 1
|
|
1233
|
+
}), 10);
|
|
1234
|
+
await store.addCheckpoints([
|
|
1235
|
+
checkpoint
|
|
1236
|
+
]);
|
|
1237
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1238
|
+
expect(blocks).toBeDefined();
|
|
1239
|
+
expect(blocks.length).toBe(1);
|
|
1240
|
+
expect(blocks[0].number).toBe(1);
|
|
1241
|
+
});
|
|
1242
|
+
it('returns all blocks for a multi-block checkpoint', async ()=>{
|
|
1243
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1244
|
+
numBlocks: 4,
|
|
1245
|
+
startBlockNumber: 1
|
|
1246
|
+
}), 10);
|
|
1247
|
+
await store.addCheckpoints([
|
|
1248
|
+
checkpoint
|
|
1249
|
+
]);
|
|
1250
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1251
|
+
expect(blocks).toBeDefined();
|
|
1252
|
+
expect(blocks.length).toBe(4);
|
|
1253
|
+
expect(blocks.map((b)=>b.number)).toEqual([
|
|
1254
|
+
1,
|
|
1255
|
+
2,
|
|
1256
|
+
3,
|
|
1257
|
+
4
|
|
1258
|
+
]);
|
|
1259
|
+
});
|
|
1260
|
+
it('returns correct blocks for different checkpoints', async ()=>{
|
|
1261
|
+
// Create checkpoints with chained archive roots
|
|
1262
|
+
// Checkpoint 1: blocks 1-2
|
|
1263
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1264
|
+
numBlocks: 2,
|
|
1265
|
+
startBlockNumber: 1
|
|
1266
|
+
});
|
|
1267
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1268
|
+
// Checkpoint 2: blocks 3-5
|
|
1269
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1270
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1271
|
+
numBlocks: 3,
|
|
1272
|
+
startBlockNumber: 3,
|
|
1273
|
+
previousArchive: previousArchive1
|
|
1274
|
+
});
|
|
1275
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1276
|
+
// Checkpoint 3: blocks 6-7
|
|
1277
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1278
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1279
|
+
numBlocks: 2,
|
|
1280
|
+
startBlockNumber: 6,
|
|
1281
|
+
previousArchive: previousArchive2
|
|
1282
|
+
});
|
|
1283
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1284
|
+
await store.addCheckpoints([
|
|
1285
|
+
checkpoint1,
|
|
1286
|
+
checkpoint2,
|
|
1287
|
+
checkpoint3
|
|
1288
|
+
]);
|
|
1289
|
+
const blocks1 = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1290
|
+
expect(blocks1).toBeDefined();
|
|
1291
|
+
expect(blocks1.map((b)=>b.number)).toEqual([
|
|
1292
|
+
1,
|
|
1293
|
+
2
|
|
1294
|
+
]);
|
|
1295
|
+
const blocks2 = await store.getBlocksForCheckpoint(CheckpointNumber(2));
|
|
1296
|
+
expect(blocks2).toBeDefined();
|
|
1297
|
+
expect(blocks2.map((b)=>b.number)).toEqual([
|
|
1298
|
+
3,
|
|
1299
|
+
4,
|
|
1300
|
+
5
|
|
1301
|
+
]);
|
|
1302
|
+
const blocks3 = await store.getBlocksForCheckpoint(CheckpointNumber(3));
|
|
1303
|
+
expect(blocks3).toBeDefined();
|
|
1304
|
+
expect(blocks3.map((b)=>b.number)).toEqual([
|
|
1305
|
+
6,
|
|
1306
|
+
7
|
|
1307
|
+
]);
|
|
1308
|
+
});
|
|
1309
|
+
it('returns undefined for non-existent checkpoint', async ()=>{
|
|
1310
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1311
|
+
numBlocks: 2,
|
|
1312
|
+
startBlockNumber: 1
|
|
1313
|
+
}), 10);
|
|
1314
|
+
await store.addCheckpoints([
|
|
1315
|
+
checkpoint
|
|
1316
|
+
]);
|
|
1317
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(5));
|
|
1318
|
+
expect(blocks).toBeUndefined();
|
|
1319
|
+
});
|
|
1320
|
+
it('returns undefined when no checkpoints exist', async ()=>{
|
|
1321
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1322
|
+
expect(blocks).toBeUndefined();
|
|
185
1323
|
});
|
|
186
1324
|
});
|
|
187
|
-
describe('
|
|
1325
|
+
describe('getRangeOfCheckpoints', ()=>{
|
|
1326
|
+
it('returns empty array when no checkpoints exist', async ()=>{
|
|
1327
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1328
|
+
expect(checkpoints).toEqual([]);
|
|
1329
|
+
});
|
|
1330
|
+
it('returns single checkpoint', async ()=>{
|
|
1331
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1332
|
+
numBlocks: 2,
|
|
1333
|
+
startBlockNumber: 1
|
|
1334
|
+
}), 10);
|
|
1335
|
+
await store.addCheckpoints([
|
|
1336
|
+
checkpoint
|
|
1337
|
+
]);
|
|
1338
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1339
|
+
expect(checkpoints.length).toBe(1);
|
|
1340
|
+
expect(checkpoints[0].checkpointNumber).toBe(1);
|
|
1341
|
+
expect(checkpoints[0].startBlock).toBe(1);
|
|
1342
|
+
expect(checkpoints[0].numBlocks).toBe(2);
|
|
1343
|
+
});
|
|
1344
|
+
it('returns multiple checkpoints in order', async ()=>{
|
|
1345
|
+
// Create checkpoints with chained archive roots
|
|
1346
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1347
|
+
numBlocks: 2,
|
|
1348
|
+
startBlockNumber: 1
|
|
1349
|
+
});
|
|
1350
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1351
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1352
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1353
|
+
numBlocks: 3,
|
|
1354
|
+
startBlockNumber: 3,
|
|
1355
|
+
previousArchive: previousArchive1
|
|
1356
|
+
});
|
|
1357
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1358
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1359
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1360
|
+
numBlocks: 1,
|
|
1361
|
+
startBlockNumber: 6,
|
|
1362
|
+
previousArchive: previousArchive2
|
|
1363
|
+
});
|
|
1364
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1365
|
+
await store.addCheckpoints([
|
|
1366
|
+
checkpoint1,
|
|
1367
|
+
checkpoint2,
|
|
1368
|
+
checkpoint3
|
|
1369
|
+
]);
|
|
1370
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1371
|
+
expect(checkpoints.length).toBe(3);
|
|
1372
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1373
|
+
1,
|
|
1374
|
+
2,
|
|
1375
|
+
3
|
|
1376
|
+
]);
|
|
1377
|
+
expect(checkpoints.map((c)=>c.startBlock)).toEqual([
|
|
1378
|
+
1,
|
|
1379
|
+
3,
|
|
1380
|
+
6
|
|
1381
|
+
]);
|
|
1382
|
+
expect(checkpoints.map((c)=>c.numBlocks)).toEqual([
|
|
1383
|
+
2,
|
|
1384
|
+
3,
|
|
1385
|
+
1
|
|
1386
|
+
]);
|
|
1387
|
+
});
|
|
1388
|
+
it('respects the from parameter', async ()=>{
|
|
1389
|
+
// Create checkpoints with chained archive roots
|
|
1390
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1391
|
+
numBlocks: 2,
|
|
1392
|
+
startBlockNumber: 1
|
|
1393
|
+
});
|
|
1394
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1395
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1396
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1397
|
+
numBlocks: 2,
|
|
1398
|
+
startBlockNumber: 3,
|
|
1399
|
+
previousArchive: previousArchive1
|
|
1400
|
+
});
|
|
1401
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1402
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1403
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1404
|
+
numBlocks: 2,
|
|
1405
|
+
startBlockNumber: 5,
|
|
1406
|
+
previousArchive: previousArchive2
|
|
1407
|
+
});
|
|
1408
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1409
|
+
await store.addCheckpoints([
|
|
1410
|
+
checkpoint1,
|
|
1411
|
+
checkpoint2,
|
|
1412
|
+
checkpoint3
|
|
1413
|
+
]);
|
|
1414
|
+
// Start from checkpoint 2
|
|
1415
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(2), 10);
|
|
1416
|
+
expect(checkpoints.length).toBe(2);
|
|
1417
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1418
|
+
2,
|
|
1419
|
+
3
|
|
1420
|
+
]);
|
|
1421
|
+
});
|
|
1422
|
+
it('respects the limit parameter', async ()=>{
|
|
1423
|
+
// Create checkpoints with chained archive roots
|
|
1424
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1425
|
+
numBlocks: 1,
|
|
1426
|
+
startBlockNumber: 1
|
|
1427
|
+
});
|
|
1428
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1429
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1430
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1431
|
+
numBlocks: 1,
|
|
1432
|
+
startBlockNumber: 2,
|
|
1433
|
+
previousArchive: previousArchive1
|
|
1434
|
+
});
|
|
1435
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1436
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1437
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1438
|
+
numBlocks: 1,
|
|
1439
|
+
startBlockNumber: 3,
|
|
1440
|
+
previousArchive: previousArchive2
|
|
1441
|
+
});
|
|
1442
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1443
|
+
const previousArchive3 = checkpoint3Cp.blocks.at(-1).archive;
|
|
1444
|
+
const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
|
|
1445
|
+
numBlocks: 1,
|
|
1446
|
+
startBlockNumber: 4,
|
|
1447
|
+
previousArchive: previousArchive3
|
|
1448
|
+
});
|
|
1449
|
+
const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
|
|
1450
|
+
await store.addCheckpoints([
|
|
1451
|
+
checkpoint1,
|
|
1452
|
+
checkpoint2,
|
|
1453
|
+
checkpoint3,
|
|
1454
|
+
checkpoint4
|
|
1455
|
+
]);
|
|
1456
|
+
// Only get 2 checkpoints
|
|
1457
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 2);
|
|
1458
|
+
expect(checkpoints.length).toBe(2);
|
|
1459
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1460
|
+
1,
|
|
1461
|
+
2
|
|
1462
|
+
]);
|
|
1463
|
+
});
|
|
1464
|
+
it('returns correct checkpoint data including L1 info', async ()=>{
|
|
1465
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1466
|
+
numBlocks: 3,
|
|
1467
|
+
startBlockNumber: 1
|
|
1468
|
+
}), 42);
|
|
1469
|
+
await store.addCheckpoints([
|
|
1470
|
+
checkpoint
|
|
1471
|
+
]);
|
|
1472
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 1);
|
|
1473
|
+
expect(checkpoints.length).toBe(1);
|
|
1474
|
+
const data = checkpoints[0];
|
|
1475
|
+
expect(data.checkpointNumber).toBe(1);
|
|
1476
|
+
expect(data.startBlock).toBe(1);
|
|
1477
|
+
expect(data.numBlocks).toBe(3);
|
|
1478
|
+
expect(data.l1.blockNumber).toBe(42n);
|
|
1479
|
+
expect(data.header.equals(checkpoint.checkpoint.header)).toBe(true);
|
|
1480
|
+
expect(data.archive.equals(checkpoint.checkpoint.archive)).toBe(true);
|
|
1481
|
+
});
|
|
1482
|
+
it('returns empty array when from is beyond available checkpoints', async ()=>{
|
|
1483
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1484
|
+
numBlocks: 2,
|
|
1485
|
+
startBlockNumber: 1
|
|
1486
|
+
}), 10);
|
|
1487
|
+
await store.addCheckpoints([
|
|
1488
|
+
checkpoint
|
|
1489
|
+
]);
|
|
1490
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(5), 10);
|
|
1491
|
+
expect(checkpoints).toEqual([]);
|
|
1492
|
+
});
|
|
1493
|
+
it('works correctly after unwinding checkpoints', async ()=>{
|
|
1494
|
+
// Create checkpoints with chained archive roots
|
|
1495
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1496
|
+
numBlocks: 2,
|
|
1497
|
+
startBlockNumber: 1
|
|
1498
|
+
});
|
|
1499
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1500
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1501
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1502
|
+
numBlocks: 2,
|
|
1503
|
+
startBlockNumber: 3,
|
|
1504
|
+
previousArchive: previousArchive1
|
|
1505
|
+
});
|
|
1506
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1507
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1508
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1509
|
+
numBlocks: 2,
|
|
1510
|
+
startBlockNumber: 5,
|
|
1511
|
+
previousArchive: previousArchive2
|
|
1512
|
+
});
|
|
1513
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1514
|
+
await store.addCheckpoints([
|
|
1515
|
+
checkpoint1,
|
|
1516
|
+
checkpoint2,
|
|
1517
|
+
checkpoint3
|
|
1518
|
+
]);
|
|
1519
|
+
// Unwind checkpoint 3
|
|
1520
|
+
await store.unwindCheckpoints(CheckpointNumber(3), 1);
|
|
1521
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1522
|
+
expect(checkpoints.length).toBe(2);
|
|
1523
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1524
|
+
1,
|
|
1525
|
+
2
|
|
1526
|
+
]);
|
|
1527
|
+
});
|
|
1528
|
+
});
|
|
1529
|
+
describe('getCheckpointedBlock', ()=>{
|
|
188
1530
|
beforeEach(async ()=>{
|
|
189
|
-
await store.
|
|
1531
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1532
|
+
});
|
|
1533
|
+
it.each(blockNumberTests)('retrieves previously stored block %i', async (blockNumber, getExpectedBlock)=>{
|
|
1534
|
+
const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
|
|
1535
|
+
const expectedBlock = getExpectedBlock();
|
|
1536
|
+
const expectedCheckpoint = publishedCheckpoints[blockNumber - 1];
|
|
1537
|
+
expect(retrievedBlock).toBeDefined();
|
|
1538
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
1539
|
+
});
|
|
1540
|
+
it('returns undefined if block is not found', async ()=>{
|
|
1541
|
+
await expect(store.getCheckpointedBlock(12)).resolves.toBeUndefined();
|
|
1542
|
+
});
|
|
1543
|
+
it('returns undefined for block number 0', async ()=>{
|
|
1544
|
+
await expect(store.getCheckpointedBlock(0)).resolves.toBeUndefined();
|
|
1545
|
+
});
|
|
1546
|
+
});
|
|
1547
|
+
describe('getCheckpointedBlockByHash', ()=>{
|
|
1548
|
+
beforeEach(async ()=>{
|
|
1549
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
190
1550
|
});
|
|
191
1551
|
it('retrieves a block by its hash', async ()=>{
|
|
192
|
-
const
|
|
193
|
-
const
|
|
194
|
-
const
|
|
1552
|
+
const expectedCheckpoint = publishedCheckpoints[5];
|
|
1553
|
+
const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
|
|
1554
|
+
const blockHash = await expectedBlock.header.hash();
|
|
1555
|
+
const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
|
|
195
1556
|
expect(retrievedBlock).toBeDefined();
|
|
196
|
-
|
|
197
|
-
retrievedBlock
|
|
198
|
-
], [
|
|
199
|
-
expectedBlock
|
|
200
|
-
]);
|
|
1557
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
201
1558
|
});
|
|
202
1559
|
it('returns undefined for non-existent block hash', async ()=>{
|
|
203
1560
|
const nonExistentHash = Fr.random();
|
|
204
|
-
await expect(store.
|
|
1561
|
+
await expect(store.getCheckpointedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
|
|
205
1562
|
});
|
|
206
1563
|
});
|
|
207
|
-
describe('
|
|
1564
|
+
describe('getCheckpointedBlockByArchive', ()=>{
|
|
208
1565
|
beforeEach(async ()=>{
|
|
209
|
-
await store.
|
|
1566
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
210
1567
|
});
|
|
211
1568
|
it('retrieves a block by its archive root', async ()=>{
|
|
212
|
-
const
|
|
213
|
-
const
|
|
214
|
-
const
|
|
1569
|
+
const expectedCheckpoint = publishedCheckpoints[3];
|
|
1570
|
+
const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
|
|
1571
|
+
const archive = expectedBlock.archive.root;
|
|
1572
|
+
const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
|
|
215
1573
|
expect(retrievedBlock).toBeDefined();
|
|
216
|
-
|
|
217
|
-
retrievedBlock
|
|
218
|
-
], [
|
|
219
|
-
expectedBlock
|
|
220
|
-
]);
|
|
1574
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
221
1575
|
});
|
|
222
1576
|
it('returns undefined for non-existent archive root', async ()=>{
|
|
223
1577
|
const nonExistentArchive = Fr.random();
|
|
224
|
-
await expect(store.
|
|
1578
|
+
await expect(store.getCheckpointedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined();
|
|
225
1579
|
});
|
|
226
1580
|
});
|
|
227
1581
|
describe('getBlockHeaderByHash', ()=>{
|
|
228
1582
|
beforeEach(async ()=>{
|
|
229
|
-
await store.
|
|
1583
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
230
1584
|
});
|
|
231
1585
|
it('retrieves a block header by its hash', async ()=>{
|
|
232
|
-
const expectedBlock =
|
|
233
|
-
const blockHash = await expectedBlock.
|
|
1586
|
+
const expectedBlock = publishedCheckpoints[7].checkpoint.blocks[0];
|
|
1587
|
+
const blockHash = await expectedBlock.header.hash();
|
|
234
1588
|
const retrievedHeader = await store.getBlockHeaderByHash(blockHash);
|
|
235
1589
|
expect(retrievedHeader).toBeDefined();
|
|
236
|
-
expect(retrievedHeader.equals(expectedBlock.
|
|
1590
|
+
expect(retrievedHeader.equals(expectedBlock.header)).toBe(true);
|
|
237
1591
|
});
|
|
238
1592
|
it('returns undefined for non-existent block hash', async ()=>{
|
|
239
1593
|
const nonExistentHash = Fr.random();
|
|
@@ -242,27 +1596,27 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
242
1596
|
});
|
|
243
1597
|
describe('getBlockHeaderByArchive', ()=>{
|
|
244
1598
|
beforeEach(async ()=>{
|
|
245
|
-
await store.
|
|
1599
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
246
1600
|
});
|
|
247
1601
|
it('retrieves a block header by its archive root', async ()=>{
|
|
248
|
-
const expectedBlock =
|
|
249
|
-
const archive = expectedBlock.
|
|
1602
|
+
const expectedBlock = publishedCheckpoints[2].checkpoint.blocks[0];
|
|
1603
|
+
const archive = expectedBlock.archive.root;
|
|
250
1604
|
const retrievedHeader = await store.getBlockHeaderByArchive(archive);
|
|
251
1605
|
expect(retrievedHeader).toBeDefined();
|
|
252
|
-
expect(retrievedHeader.equals(expectedBlock.
|
|
1606
|
+
expect(retrievedHeader.equals(expectedBlock.header)).toBe(true);
|
|
253
1607
|
});
|
|
254
1608
|
it('returns undefined for non-existent archive root', async ()=>{
|
|
255
1609
|
const nonExistentArchive = Fr.random();
|
|
256
1610
|
await expect(store.getBlockHeaderByArchive(nonExistentArchive)).resolves.toBeUndefined();
|
|
257
1611
|
});
|
|
258
1612
|
});
|
|
259
|
-
describe('
|
|
260
|
-
it('returns the
|
|
261
|
-
await expect(store.
|
|
1613
|
+
describe('getSynchedCheckpointNumber', ()=>{
|
|
1614
|
+
it('returns the checkpoint number before INITIAL_CHECKPOINT_NUMBER if no checkpoints have been added', async ()=>{
|
|
1615
|
+
await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(INITIAL_CHECKPOINT_NUMBER - 1);
|
|
262
1616
|
});
|
|
263
|
-
it(
|
|
264
|
-
await store.
|
|
265
|
-
await expect(store.
|
|
1617
|
+
it('returns the most recently added checkpoint number', async ()=>{
|
|
1618
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1619
|
+
await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(publishedCheckpoints.at(-1).checkpoint.number);
|
|
266
1620
|
});
|
|
267
1621
|
});
|
|
268
1622
|
describe('getSynchPoint', ()=>{
|
|
@@ -273,7 +1627,7 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
273
1627
|
});
|
|
274
1628
|
});
|
|
275
1629
|
it('returns the L1 block number in which the most recent L2 block was published', async ()=>{
|
|
276
|
-
await store.
|
|
1630
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
277
1631
|
await expect(store.getSynchPoint()).resolves.toEqual({
|
|
278
1632
|
blocksSynchedTo: 19n,
|
|
279
1633
|
messagesSynchedTo: undefined
|
|
@@ -325,77 +1679,62 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
325
1679
|
});
|
|
326
1680
|
describe('addLogs', ()=>{
|
|
327
1681
|
it('adds private & public logs', async ()=>{
|
|
328
|
-
const
|
|
329
|
-
await
|
|
330
|
-
|
|
331
|
-
])).resolves.toEqual(true);
|
|
332
|
-
});
|
|
333
|
-
});
|
|
334
|
-
describe('deleteLogs', ()=>{
|
|
335
|
-
it('deletes private & public logs', async ()=>{
|
|
336
|
-
const block = blocks[0].block;
|
|
337
|
-
await store.addBlocks([
|
|
338
|
-
blocks[0]
|
|
339
|
-
]);
|
|
340
|
-
await expect(store.addLogs([
|
|
341
|
-
block
|
|
342
|
-
])).resolves.toEqual(true);
|
|
343
|
-
expect((await store.getPrivateLogs(BlockNumber(1), 1)).length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.privateLogs).flat().length);
|
|
344
|
-
expect((await store.getPublicLogs({
|
|
345
|
-
fromBlock: BlockNumber(1)
|
|
346
|
-
})).logs.length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.publicLogs).flat().length);
|
|
347
|
-
// This one is a pain for memory as we would never want to just delete memory in the middle.
|
|
348
|
-
await store.deleteLogs([
|
|
349
|
-
block
|
|
1682
|
+
const checkpoint = publishedCheckpoints[0];
|
|
1683
|
+
await store.addCheckpoints([
|
|
1684
|
+
checkpoint
|
|
350
1685
|
]);
|
|
351
|
-
expect(
|
|
352
|
-
expect((await store.getPublicLogs({
|
|
353
|
-
fromBlock: BlockNumber(1)
|
|
354
|
-
})).logs.length).toEqual(0);
|
|
1686
|
+
await expect(store.addLogs(checkpoint.checkpoint.blocks)).resolves.toEqual(true);
|
|
355
1687
|
});
|
|
356
1688
|
});
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
1689
|
+
it('deleteLogs', async ()=>{
|
|
1690
|
+
const block = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
1691
|
+
await store.addBlocks([
|
|
1692
|
+
block
|
|
1693
|
+
]);
|
|
1694
|
+
await expect(store.addLogs([
|
|
1695
|
+
block
|
|
1696
|
+
])).resolves.toEqual(true);
|
|
1697
|
+
expect((await store.getPublicLogs({
|
|
1698
|
+
fromBlock: BlockNumber(1)
|
|
1699
|
+
})).logs.length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.publicLogs).flat().length);
|
|
1700
|
+
// This one is a pain for memory as we would never want to just delete memory in the middle.
|
|
1701
|
+
await store.deleteLogs([
|
|
1702
|
+
block
|
|
1703
|
+
]);
|
|
1704
|
+
expect((await store.getPublicLogs({
|
|
1705
|
+
fromBlock: BlockNumber(1)
|
|
1706
|
+
})).logs.length).toEqual(0);
|
|
369
1707
|
});
|
|
370
1708
|
describe('getTxEffect', ()=>{
|
|
1709
|
+
const getBlock = (i)=>publishedCheckpoints[i].checkpoint.blocks[0];
|
|
371
1710
|
beforeEach(async ()=>{
|
|
372
|
-
await store.addLogs(
|
|
373
|
-
await store.
|
|
1711
|
+
await store.addLogs(publishedCheckpoints.flatMap((x)=>x.checkpoint.blocks));
|
|
1712
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
374
1713
|
});
|
|
375
1714
|
it.each([
|
|
376
1715
|
()=>({
|
|
377
|
-
data:
|
|
378
|
-
block:
|
|
1716
|
+
data: getBlock(0).body.txEffects[0],
|
|
1717
|
+
block: getBlock(0),
|
|
379
1718
|
txIndexInBlock: 0
|
|
380
1719
|
}),
|
|
381
1720
|
()=>({
|
|
382
|
-
data:
|
|
383
|
-
block:
|
|
1721
|
+
data: getBlock(9).body.txEffects[3],
|
|
1722
|
+
block: getBlock(9),
|
|
384
1723
|
txIndexInBlock: 3
|
|
385
1724
|
}),
|
|
386
1725
|
()=>({
|
|
387
|
-
data:
|
|
388
|
-
block:
|
|
1726
|
+
data: getBlock(3).body.txEffects[1],
|
|
1727
|
+
block: getBlock(3),
|
|
389
1728
|
txIndexInBlock: 1
|
|
390
1729
|
}),
|
|
391
1730
|
()=>({
|
|
392
|
-
data:
|
|
393
|
-
block:
|
|
1731
|
+
data: getBlock(5).body.txEffects[2],
|
|
1732
|
+
block: getBlock(5),
|
|
394
1733
|
txIndexInBlock: 2
|
|
395
1734
|
}),
|
|
396
1735
|
()=>({
|
|
397
|
-
data:
|
|
398
|
-
block:
|
|
1736
|
+
data: getBlock(1).body.txEffects[0],
|
|
1737
|
+
block: getBlock(1),
|
|
399
1738
|
txIndexInBlock: 0
|
|
400
1739
|
})
|
|
401
1740
|
])('retrieves a previously stored transaction', async (getExpectedTx)=>{
|
|
@@ -403,7 +1742,7 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
403
1742
|
const expectedTx = {
|
|
404
1743
|
data,
|
|
405
1744
|
l2BlockNumber: block.number,
|
|
406
|
-
l2BlockHash: L2BlockHash.fromField(await block.hash()),
|
|
1745
|
+
l2BlockHash: L2BlockHash.fromField(await block.header.hash()),
|
|
407
1746
|
txIndexInBlock
|
|
408
1747
|
};
|
|
409
1748
|
const actualTx = await store.getTxEffect(data.txHash);
|
|
@@ -413,36 +1752,36 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
413
1752
|
await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined();
|
|
414
1753
|
});
|
|
415
1754
|
it.each([
|
|
416
|
-
()=>
|
|
417
|
-
()=>
|
|
418
|
-
()=>
|
|
419
|
-
()=>
|
|
420
|
-
()=>
|
|
421
|
-
])('tries to retrieves a previously stored transaction after deleted', async (
|
|
422
|
-
await store.
|
|
423
|
-
const
|
|
424
|
-
const actualTx = await store.getTxEffect(
|
|
1755
|
+
()=>getBlock(0).body.txEffects[0],
|
|
1756
|
+
()=>getBlock(9).body.txEffects[3],
|
|
1757
|
+
()=>getBlock(3).body.txEffects[1],
|
|
1758
|
+
()=>getBlock(5).body.txEffects[2],
|
|
1759
|
+
()=>getBlock(1).body.txEffects[0]
|
|
1760
|
+
])('tries to retrieves a previously stored transaction after deleted', async (getTxEffect)=>{
|
|
1761
|
+
await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
|
|
1762
|
+
const txEffect = getTxEffect();
|
|
1763
|
+
const actualTx = await store.getTxEffect(txEffect.txHash);
|
|
425
1764
|
expect(actualTx).toEqual(undefined);
|
|
426
1765
|
});
|
|
427
1766
|
it('returns undefined if tx is not found', async ()=>{
|
|
428
1767
|
await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined();
|
|
429
1768
|
});
|
|
430
1769
|
it('does not fail if the block is unwound while requesting a tx', async ()=>{
|
|
431
|
-
const
|
|
1770
|
+
const txEffect = getBlock(1).body.txEffects[0];
|
|
432
1771
|
let done = false;
|
|
433
1772
|
void (async ()=>{
|
|
434
1773
|
while(!done){
|
|
435
|
-
void store.getTxEffect(
|
|
1774
|
+
void store.getTxEffect(txEffect.txHash);
|
|
436
1775
|
await sleep(1);
|
|
437
1776
|
}
|
|
438
1777
|
})();
|
|
439
|
-
await store.
|
|
1778
|
+
await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
|
|
440
1779
|
done = true;
|
|
441
|
-
expect(await store.getTxEffect(
|
|
1780
|
+
expect(await store.getTxEffect(txEffect.txHash)).toEqual(undefined);
|
|
442
1781
|
});
|
|
443
1782
|
});
|
|
444
1783
|
describe('L1 to L2 Messages', ()=>{
|
|
445
|
-
const
|
|
1784
|
+
const initialCheckpointNumber = CheckpointNumber(13);
|
|
446
1785
|
const checkMessages = async (msgs)=>{
|
|
447
1786
|
expect(await store.getLastL1ToL2Message()).toEqual(msgs.at(-1));
|
|
448
1787
|
expect(await toArray(store.iterateL1ToL2Messages())).toEqual(msgs);
|
|
@@ -450,11 +1789,11 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
450
1789
|
};
|
|
451
1790
|
const makeInboxMessagesWithFullBlocks = (blockCount, opts = {})=>makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, {
|
|
452
1791
|
overrideFn: (msg, i)=>{
|
|
453
|
-
const
|
|
454
|
-
const index = InboxLeaf.
|
|
1792
|
+
const checkpointNumber = CheckpointNumber((opts.initialCheckpointNumber ?? initialCheckpointNumber) + Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
|
|
1793
|
+
const index = InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
455
1794
|
return {
|
|
456
1795
|
...msg,
|
|
457
|
-
|
|
1796
|
+
checkpointNumber,
|
|
458
1797
|
index
|
|
459
1798
|
};
|
|
460
1799
|
}
|
|
@@ -462,7 +1801,7 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
462
1801
|
it('stores first message ever', async ()=>{
|
|
463
1802
|
const msg = makeInboxMessage(Buffer16.ZERO, {
|
|
464
1803
|
index: 0n,
|
|
465
|
-
|
|
1804
|
+
checkpointNumber: CheckpointNumber(1)
|
|
466
1805
|
});
|
|
467
1806
|
await store.addL1ToL2Messages([
|
|
468
1807
|
msg
|
|
@@ -470,13 +1809,13 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
470
1809
|
await checkMessages([
|
|
471
1810
|
msg
|
|
472
1811
|
]);
|
|
473
|
-
expect(await store.getL1ToL2Messages(
|
|
1812
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([
|
|
474
1813
|
msg.leaf
|
|
475
1814
|
]);
|
|
476
1815
|
});
|
|
477
1816
|
it('stores single message', async ()=>{
|
|
478
1817
|
const msg = makeInboxMessage(Buffer16.ZERO, {
|
|
479
|
-
|
|
1818
|
+
checkpointNumber: CheckpointNumber(2)
|
|
480
1819
|
});
|
|
481
1820
|
await store.addL1ToL2Messages([
|
|
482
1821
|
msg
|
|
@@ -484,23 +1823,23 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
484
1823
|
await checkMessages([
|
|
485
1824
|
msg
|
|
486
1825
|
]);
|
|
487
|
-
expect(await store.getL1ToL2Messages(
|
|
1826
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toEqual([
|
|
488
1827
|
msg.leaf
|
|
489
1828
|
]);
|
|
490
1829
|
});
|
|
491
1830
|
it('stores and returns messages across different blocks', async ()=>{
|
|
492
1831
|
const msgs = makeInboxMessages(5, {
|
|
493
|
-
|
|
1832
|
+
initialCheckpointNumber
|
|
494
1833
|
});
|
|
495
1834
|
await store.addL1ToL2Messages(msgs);
|
|
496
1835
|
await checkMessages(msgs);
|
|
497
|
-
expect(await store.getL1ToL2Messages(
|
|
1836
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual([
|
|
498
1837
|
msgs[2]
|
|
499
1838
|
].map((m)=>m.leaf));
|
|
500
1839
|
});
|
|
501
1840
|
it('stores the same messages again', async ()=>{
|
|
502
1841
|
const msgs = makeInboxMessages(5, {
|
|
503
|
-
|
|
1842
|
+
initialCheckpointNumber
|
|
504
1843
|
});
|
|
505
1844
|
await store.addL1ToL2Messages(msgs);
|
|
506
1845
|
await store.addL1ToL2Messages(msgs.slice(2));
|
|
@@ -508,10 +1847,10 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
508
1847
|
});
|
|
509
1848
|
it('stores and returns messages across different blocks with gaps', async ()=>{
|
|
510
1849
|
const msgs1 = makeInboxMessages(3, {
|
|
511
|
-
|
|
1850
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
512
1851
|
});
|
|
513
1852
|
const msgs2 = makeInboxMessages(3, {
|
|
514
|
-
|
|
1853
|
+
initialCheckpointNumber: CheckpointNumber(20),
|
|
515
1854
|
initialHash: msgs1.at(-1).rollingHash
|
|
516
1855
|
});
|
|
517
1856
|
await store.addL1ToL2Messages(msgs1);
|
|
@@ -520,22 +1859,22 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
520
1859
|
...msgs1,
|
|
521
1860
|
...msgs2
|
|
522
1861
|
]);
|
|
523
|
-
expect(await store.getL1ToL2Messages(
|
|
1862
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([
|
|
524
1863
|
msgs1[0].leaf
|
|
525
1864
|
]);
|
|
526
|
-
expect(await store.getL1ToL2Messages(
|
|
527
|
-
expect(await store.getL1ToL2Messages(
|
|
1865
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toEqual([]);
|
|
1866
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(20))).toEqual([
|
|
528
1867
|
msgs2[0].leaf
|
|
529
1868
|
]);
|
|
530
|
-
expect(await store.getL1ToL2Messages(
|
|
1869
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(24))).toEqual([]);
|
|
531
1870
|
});
|
|
532
1871
|
it('stores and returns messages with block numbers larger than a byte', async ()=>{
|
|
533
1872
|
const msgs = makeInboxMessages(5, {
|
|
534
|
-
|
|
1873
|
+
initialCheckpointNumber: CheckpointNumber(1000)
|
|
535
1874
|
});
|
|
536
1875
|
await store.addL1ToL2Messages(msgs);
|
|
537
1876
|
await checkMessages(msgs);
|
|
538
|
-
expect(await store.getL1ToL2Messages(
|
|
1877
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1002))).toEqual([
|
|
539
1878
|
msgs[2]
|
|
540
1879
|
].map((m)=>m.leaf));
|
|
541
1880
|
});
|
|
@@ -543,27 +1882,27 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
543
1882
|
const msgs = makeInboxMessagesWithFullBlocks(4);
|
|
544
1883
|
await store.addL1ToL2Messages(msgs);
|
|
545
1884
|
await checkMessages(msgs);
|
|
546
|
-
const blockMessages = await store.getL1ToL2Messages(
|
|
1885
|
+
const blockMessages = await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 1));
|
|
547
1886
|
expect(blockMessages).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
548
1887
|
expect(blockMessages).toEqual(msgs.slice(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2).map((m)=>m.leaf));
|
|
549
1888
|
});
|
|
550
1889
|
it('stores messages in multiple operations', async ()=>{
|
|
551
1890
|
const msgs = makeInboxMessages(20, {
|
|
552
|
-
|
|
1891
|
+
initialCheckpointNumber
|
|
553
1892
|
});
|
|
554
1893
|
await store.addL1ToL2Messages(msgs.slice(0, 10));
|
|
555
1894
|
await store.addL1ToL2Messages(msgs.slice(10, 20));
|
|
556
|
-
expect(await store.getL1ToL2Messages(
|
|
1895
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual([
|
|
557
1896
|
msgs[2]
|
|
558
1897
|
].map((m)=>m.leaf));
|
|
559
|
-
expect(await store.getL1ToL2Messages(
|
|
1898
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 12))).toEqual([
|
|
560
1899
|
msgs[12]
|
|
561
1900
|
].map((m)=>m.leaf));
|
|
562
1901
|
await checkMessages(msgs);
|
|
563
1902
|
});
|
|
564
1903
|
it('iterates over messages from start index', async ()=>{
|
|
565
1904
|
const msgs = makeInboxMessages(10, {
|
|
566
|
-
|
|
1905
|
+
initialCheckpointNumber
|
|
567
1906
|
});
|
|
568
1907
|
await store.addL1ToL2Messages(msgs);
|
|
569
1908
|
const iterated = await toArray(store.iterateL1ToL2Messages({
|
|
@@ -573,9 +1912,10 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
573
1912
|
});
|
|
574
1913
|
it('iterates over messages in reverse', async ()=>{
|
|
575
1914
|
const msgs = makeInboxMessages(10, {
|
|
576
|
-
|
|
1915
|
+
initialCheckpointNumber
|
|
577
1916
|
});
|
|
578
1917
|
await store.addL1ToL2Messages(msgs);
|
|
1918
|
+
initialCheckpointNumber;
|
|
579
1919
|
const iterated = await toArray(store.iterateL1ToL2Messages({
|
|
580
1920
|
reverse: true,
|
|
581
1921
|
end: msgs[3].index
|
|
@@ -593,9 +1933,9 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
593
1933
|
});
|
|
594
1934
|
it('throws if block number for the first message is out of order', async ()=>{
|
|
595
1935
|
const msgs = makeInboxMessages(4, {
|
|
596
|
-
|
|
1936
|
+
initialCheckpointNumber
|
|
597
1937
|
});
|
|
598
|
-
msgs[2].
|
|
1938
|
+
msgs[2].checkpointNumber = CheckpointNumber(initialCheckpointNumber - 1);
|
|
599
1939
|
await store.addL1ToL2Messages(msgs.slice(0, 2));
|
|
600
1940
|
await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
|
|
601
1941
|
});
|
|
@@ -607,29 +1947,29 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
607
1947
|
it('throws if rolling hash for first message is not correct', async ()=>{
|
|
608
1948
|
const msgs = makeInboxMessages(4);
|
|
609
1949
|
msgs[2].rollingHash = Buffer16.random();
|
|
610
|
-
await store.addL1ToL2Messages(msgs.slice(0,
|
|
1950
|
+
await store.addL1ToL2Messages(msgs.slice(0, CheckpointNumber(2)));
|
|
611
1951
|
await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
|
|
612
1952
|
});
|
|
613
1953
|
it('throws if index is not in the correct range', async ()=>{
|
|
614
1954
|
const msgs = makeInboxMessages(5, {
|
|
615
|
-
|
|
1955
|
+
initialCheckpointNumber
|
|
616
1956
|
});
|
|
617
1957
|
msgs.at(-1).index += 100n;
|
|
618
1958
|
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
619
1959
|
});
|
|
620
1960
|
it('throws if first index in block has gaps', async ()=>{
|
|
621
1961
|
const msgs = makeInboxMessages(4, {
|
|
622
|
-
|
|
1962
|
+
initialCheckpointNumber
|
|
623
1963
|
});
|
|
624
1964
|
msgs[2].index++;
|
|
625
1965
|
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
626
1966
|
});
|
|
627
1967
|
it('throws if index does not follow previous one', async ()=>{
|
|
628
1968
|
const msgs = makeInboxMessages(2, {
|
|
629
|
-
|
|
1969
|
+
initialCheckpointNumber,
|
|
630
1970
|
overrideFn: (msg, i)=>({
|
|
631
1971
|
...msg,
|
|
632
|
-
|
|
1972
|
+
checkpointNumber: CheckpointNumber(2),
|
|
633
1973
|
index: BigInt(i + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2)
|
|
634
1974
|
})
|
|
635
1975
|
});
|
|
@@ -638,24 +1978,24 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
638
1978
|
});
|
|
639
1979
|
it('removes messages up to the given block number', async ()=>{
|
|
640
1980
|
const msgs = makeInboxMessagesWithFullBlocks(4, {
|
|
641
|
-
|
|
1981
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
642
1982
|
});
|
|
643
1983
|
await store.addL1ToL2Messages(msgs);
|
|
644
1984
|
await checkMessages(msgs);
|
|
645
|
-
expect(await store.getL1ToL2Messages(
|
|
646
|
-
expect(await store.getL1ToL2Messages(
|
|
647
|
-
expect(await store.getL1ToL2Messages(
|
|
648
|
-
expect(await store.getL1ToL2Messages(
|
|
649
|
-
await store.
|
|
650
|
-
expect(await store.getL1ToL2Messages(
|
|
651
|
-
expect(await store.getL1ToL2Messages(
|
|
652
|
-
expect(await store.getL1ToL2Messages(
|
|
653
|
-
expect(await store.getL1ToL2Messages(
|
|
1985
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1986
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1987
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1988
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1989
|
+
await store.rollbackL1ToL2MessagesToCheckpoint(CheckpointNumber(2));
|
|
1990
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1991
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1992
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(0);
|
|
1993
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(0);
|
|
654
1994
|
await checkMessages(msgs.slice(0, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2));
|
|
655
1995
|
});
|
|
656
1996
|
it('removes messages starting with the given index', async ()=>{
|
|
657
1997
|
const msgs = makeInboxMessagesWithFullBlocks(4, {
|
|
658
|
-
|
|
1998
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
659
1999
|
});
|
|
660
2000
|
await store.addL1ToL2Messages(msgs);
|
|
661
2001
|
await store.removeL1ToL2Messages(msgs[13].index);
|
|
@@ -841,180 +2181,268 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
841
2181
|
expect(stored?.utilityFunctions).toEqual(fns);
|
|
842
2182
|
});
|
|
843
2183
|
});
|
|
844
|
-
describe('
|
|
845
|
-
const
|
|
2184
|
+
describe('getPrivateLogsByTags', ()=>{
|
|
2185
|
+
const numBlocksForLogs = 3;
|
|
846
2186
|
const numTxsPerBlock = 4;
|
|
847
2187
|
const numPrivateLogsPerTx = 3;
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
: new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1));
|
|
2188
|
+
let logsCheckpoints;
|
|
2189
|
+
const makePrivateLogTag = (blockNumber, txIndex, logIndex)=>new SiloedTag(blockNumber === 1 && txIndex === 0 && logIndex === 0 ? Fr.ZERO // Shared tag
|
|
2190
|
+
: new Fr(blockNumber * 100 + txIndex * 10 + logIndex));
|
|
852
2191
|
const makePrivateLog = (tag)=>PrivateLog.from({
|
|
853
|
-
fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, (i)=>!i ? tag : new Fr(tag.
|
|
2192
|
+
fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, (i)=>!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i))),
|
|
854
2193
|
emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS
|
|
855
2194
|
});
|
|
856
|
-
const makePublicLog = (tag)=>PublicLog.from({
|
|
857
|
-
contractAddress: AztecAddress.fromNumber(1),
|
|
858
|
-
// Arbitrary length
|
|
859
|
-
fields: new Array(10).fill(null).map((_, i)=>!i ? tag : new Fr(tag.toNumber() + i))
|
|
860
|
-
});
|
|
861
2195
|
const mockPrivateLogs = (blockNumber, txIndex)=>{
|
|
862
2196
|
return times(numPrivateLogsPerTx, (logIndex)=>{
|
|
863
|
-
const tag =
|
|
2197
|
+
const tag = makePrivateLogTag(blockNumber, txIndex, logIndex);
|
|
864
2198
|
return makePrivateLog(tag);
|
|
865
2199
|
});
|
|
866
2200
|
};
|
|
867
|
-
const
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
|
|
2201
|
+
const mockCheckpointWithLogs = async (blockNumber, previousArchive)=>{
|
|
2202
|
+
const block = await L2BlockNew.random(BlockNumber(blockNumber), {
|
|
2203
|
+
checkpointNumber: CheckpointNumber(blockNumber),
|
|
2204
|
+
indexWithinCheckpoint: 0,
|
|
2205
|
+
state: makeStateForBlock(blockNumber, numTxsPerBlock),
|
|
2206
|
+
...previousArchive ? {
|
|
2207
|
+
lastArchive: previousArchive
|
|
2208
|
+
} : {}
|
|
871
2209
|
});
|
|
872
|
-
};
|
|
873
|
-
const mockBlockWithLogs = async (blockNumber)=>{
|
|
874
|
-
const block = await L2Block.random(BlockNumber(blockNumber));
|
|
875
2210
|
block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
|
|
876
2211
|
block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex)=>{
|
|
877
2212
|
const txEffect = await TxEffect.random();
|
|
878
2213
|
txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
|
|
879
|
-
txEffect.publicLogs =
|
|
2214
|
+
txEffect.publicLogs = []; // No public logs needed for private log tests
|
|
880
2215
|
return txEffect;
|
|
881
2216
|
});
|
|
882
|
-
|
|
883
|
-
block
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
blockNumber: BigInt(blockNumber),
|
|
887
|
-
blockHash: makeBlockHash(blockNumber),
|
|
888
|
-
timestamp: BigInt(blockNumber)
|
|
889
|
-
}
|
|
890
|
-
});
|
|
2217
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
2218
|
+
block
|
|
2219
|
+
], CheckpointNumber(blockNumber));
|
|
2220
|
+
return makePublishedCheckpoint(checkpoint, blockNumber);
|
|
891
2221
|
};
|
|
892
2222
|
beforeEach(async ()=>{
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
|
|
2223
|
+
// Create checkpoints sequentially to chain archive roots
|
|
2224
|
+
logsCheckpoints = [];
|
|
2225
|
+
for(let i = 0; i < numBlocksForLogs; i++){
|
|
2226
|
+
const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
2227
|
+
logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
|
|
2228
|
+
}
|
|
2229
|
+
await store.addCheckpoints(logsCheckpoints);
|
|
2230
|
+
await store.addLogs(logsCheckpoints.flatMap((p)=>p.checkpoint.blocks));
|
|
896
2231
|
});
|
|
897
2232
|
it('is possible to batch request private logs via tags', async ()=>{
|
|
898
2233
|
const tags = [
|
|
899
|
-
|
|
900
|
-
|
|
2234
|
+
makePrivateLogTag(2, 1, 2),
|
|
2235
|
+
makePrivateLogTag(1, 2, 0)
|
|
901
2236
|
];
|
|
902
|
-
const logsByTags = await store.
|
|
2237
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
903
2238
|
expect(logsByTags).toEqual([
|
|
904
2239
|
[
|
|
905
2240
|
expect.objectContaining({
|
|
906
2241
|
blockNumber: 2,
|
|
907
|
-
|
|
908
|
-
isFromPublic: false
|
|
2242
|
+
logData: makePrivateLog(tags[0]).getEmittedFields()
|
|
909
2243
|
})
|
|
910
2244
|
],
|
|
911
2245
|
[
|
|
912
2246
|
expect.objectContaining({
|
|
913
2247
|
blockNumber: 1,
|
|
914
|
-
|
|
915
|
-
isFromPublic: false
|
|
2248
|
+
logData: makePrivateLog(tags[1]).getEmittedFields()
|
|
916
2249
|
})
|
|
917
2250
|
]
|
|
918
2251
|
]);
|
|
919
2252
|
});
|
|
920
|
-
it('is possible to batch request
|
|
921
|
-
// Tag(1, 0, 0) is shared with the first private log and the first public log.
|
|
2253
|
+
it('is possible to batch request logs that have the same tag but different content', async ()=>{
|
|
922
2254
|
const tags = [
|
|
923
|
-
|
|
2255
|
+
makePrivateLogTag(1, 2, 1)
|
|
924
2256
|
];
|
|
925
|
-
|
|
2257
|
+
// Create a checkpoint containing logs that have the same tag as the checkpoints before.
|
|
2258
|
+
// Chain from the last checkpoint's archive
|
|
2259
|
+
const newBlockNumber = numBlocksForLogs + 1;
|
|
2260
|
+
const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
|
|
2261
|
+
const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
|
|
2262
|
+
const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1];
|
|
2263
|
+
newLog.fields[0] = tags[0].value;
|
|
2264
|
+
newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1] = newLog;
|
|
2265
|
+
await store.addCheckpoints([
|
|
2266
|
+
newCheckpoint
|
|
2267
|
+
]);
|
|
2268
|
+
await store.addLogs([
|
|
2269
|
+
newCheckpoint.checkpoint.blocks[0]
|
|
2270
|
+
]);
|
|
2271
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
926
2272
|
expect(logsByTags).toEqual([
|
|
927
2273
|
[
|
|
928
2274
|
expect.objectContaining({
|
|
929
2275
|
blockNumber: 1,
|
|
930
|
-
|
|
931
|
-
isFromPublic: false
|
|
2276
|
+
logData: makePrivateLog(tags[0]).getEmittedFields()
|
|
932
2277
|
}),
|
|
2278
|
+
expect.objectContaining({
|
|
2279
|
+
blockNumber: newBlockNumber,
|
|
2280
|
+
logData: newLog.getEmittedFields()
|
|
2281
|
+
})
|
|
2282
|
+
]
|
|
2283
|
+
]);
|
|
2284
|
+
});
|
|
2285
|
+
it('is possible to request logs for non-existing tags and determine their position', async ()=>{
|
|
2286
|
+
const tags = [
|
|
2287
|
+
makePrivateLogTag(99, 88, 77),
|
|
2288
|
+
makePrivateLogTag(1, 1, 1)
|
|
2289
|
+
];
|
|
2290
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
2291
|
+
expect(logsByTags).toEqual([
|
|
2292
|
+
[],
|
|
2293
|
+
[
|
|
933
2294
|
expect.objectContaining({
|
|
934
2295
|
blockNumber: 1,
|
|
935
|
-
|
|
936
|
-
|
|
2296
|
+
logData: makePrivateLog(tags[1]).getEmittedFields()
|
|
2297
|
+
})
|
|
2298
|
+
]
|
|
2299
|
+
]);
|
|
2300
|
+
});
|
|
2301
|
+
});
|
|
2302
|
+
describe('getPublicLogsByTagsFromContract', ()=>{
|
|
2303
|
+
const numBlocksForLogs = 3;
|
|
2304
|
+
const numTxsPerBlock = 4;
|
|
2305
|
+
const numPublicLogsPerTx = 2;
|
|
2306
|
+
const contractAddress = AztecAddress.fromNumber(543254);
|
|
2307
|
+
let logsCheckpoints;
|
|
2308
|
+
const makePublicLogTag = (blockNumber, txIndex, logIndex)=>new Tag(blockNumber === 1 && txIndex === 0 && logIndex === 0 ? Fr.ZERO // Shared tag
|
|
2309
|
+
: new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123));
|
|
2310
|
+
const makePublicLog = (tag)=>PublicLog.from({
|
|
2311
|
+
contractAddress: contractAddress,
|
|
2312
|
+
// Arbitrary length
|
|
2313
|
+
fields: new Array(10).fill(null).map((_, i)=>!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))
|
|
2314
|
+
});
|
|
2315
|
+
const mockPublicLogs = (blockNumber, txIndex)=>{
|
|
2316
|
+
return times(numPublicLogsPerTx, (logIndex)=>{
|
|
2317
|
+
const tag = makePublicLogTag(blockNumber, txIndex, logIndex);
|
|
2318
|
+
return makePublicLog(tag);
|
|
2319
|
+
});
|
|
2320
|
+
};
|
|
2321
|
+
const mockCheckpointWithLogs = async (blockNumber, previousArchive)=>{
|
|
2322
|
+
const block = await L2BlockNew.random(BlockNumber(blockNumber), {
|
|
2323
|
+
checkpointNumber: CheckpointNumber(blockNumber),
|
|
2324
|
+
indexWithinCheckpoint: 0,
|
|
2325
|
+
state: makeStateForBlock(blockNumber, numTxsPerBlock),
|
|
2326
|
+
...previousArchive ? {
|
|
2327
|
+
lastArchive: previousArchive
|
|
2328
|
+
} : {}
|
|
2329
|
+
});
|
|
2330
|
+
block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
|
|
2331
|
+
block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex)=>{
|
|
2332
|
+
const txEffect = await TxEffect.random();
|
|
2333
|
+
txEffect.privateLogs = []; // No private logs needed for public log tests
|
|
2334
|
+
txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
|
|
2335
|
+
return txEffect;
|
|
2336
|
+
});
|
|
2337
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
2338
|
+
block
|
|
2339
|
+
], CheckpointNumber(blockNumber));
|
|
2340
|
+
return makePublishedCheckpoint(checkpoint, blockNumber);
|
|
2341
|
+
};
|
|
2342
|
+
beforeEach(async ()=>{
|
|
2343
|
+
// Create checkpoints sequentially to chain archive roots
|
|
2344
|
+
logsCheckpoints = [];
|
|
2345
|
+
for(let i = 0; i < numBlocksForLogs; i++){
|
|
2346
|
+
const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
2347
|
+
logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
|
|
2348
|
+
}
|
|
2349
|
+
await store.addCheckpoints(logsCheckpoints);
|
|
2350
|
+
await store.addLogs(logsCheckpoints.flatMap((p)=>p.checkpoint.blocks));
|
|
2351
|
+
});
|
|
2352
|
+
it('is possible to batch request public logs via tags', async ()=>{
|
|
2353
|
+
const tags = [
|
|
2354
|
+
makePublicLogTag(2, 1, 1),
|
|
2355
|
+
makePublicLogTag(1, 2, 0)
|
|
2356
|
+
];
|
|
2357
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
2358
|
+
expect(logsByTags).toEqual([
|
|
2359
|
+
[
|
|
2360
|
+
expect.objectContaining({
|
|
2361
|
+
blockNumber: 2,
|
|
2362
|
+
logData: makePublicLog(tags[0]).getEmittedFields()
|
|
2363
|
+
})
|
|
2364
|
+
],
|
|
2365
|
+
[
|
|
2366
|
+
expect.objectContaining({
|
|
2367
|
+
blockNumber: 1,
|
|
2368
|
+
logData: makePublicLog(tags[1]).getEmittedFields()
|
|
937
2369
|
})
|
|
938
2370
|
]
|
|
939
2371
|
]);
|
|
940
2372
|
});
|
|
941
2373
|
it('is possible to batch request logs that have the same tag but different content', async ()=>{
|
|
942
2374
|
const tags = [
|
|
943
|
-
|
|
2375
|
+
makePublicLogTag(1, 2, 1)
|
|
944
2376
|
];
|
|
945
|
-
// Create a
|
|
946
|
-
|
|
947
|
-
const
|
|
948
|
-
const
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
2377
|
+
// Create a checkpoint containing logs that have the same tag as the checkpoints before.
|
|
2378
|
+
// Chain from the last checkpoint's archive
|
|
2379
|
+
const newBlockNumber = numBlocksForLogs + 1;
|
|
2380
|
+
const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
|
|
2381
|
+
const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
|
|
2382
|
+
const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1];
|
|
2383
|
+
newLog.fields[0] = tags[0].value;
|
|
2384
|
+
newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1] = newLog;
|
|
2385
|
+
await store.addCheckpoints([
|
|
2386
|
+
newCheckpoint
|
|
953
2387
|
]);
|
|
954
2388
|
await store.addLogs([
|
|
955
|
-
|
|
2389
|
+
newCheckpoint.checkpoint.blocks[0]
|
|
956
2390
|
]);
|
|
957
|
-
const logsByTags = await store.
|
|
2391
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
958
2392
|
expect(logsByTags).toEqual([
|
|
959
2393
|
[
|
|
960
2394
|
expect.objectContaining({
|
|
961
2395
|
blockNumber: 1,
|
|
962
|
-
|
|
963
|
-
isFromPublic: false
|
|
2396
|
+
logData: makePublicLog(tags[0]).getEmittedFields()
|
|
964
2397
|
}),
|
|
965
2398
|
expect.objectContaining({
|
|
966
2399
|
blockNumber: newBlockNumber,
|
|
967
|
-
|
|
968
|
-
isFromPublic: false
|
|
2400
|
+
logData: newLog.getEmittedFields()
|
|
969
2401
|
})
|
|
970
2402
|
]
|
|
971
2403
|
]);
|
|
972
2404
|
});
|
|
973
2405
|
it('is possible to request logs for non-existing tags and determine their position', async ()=>{
|
|
974
2406
|
const tags = [
|
|
975
|
-
|
|
976
|
-
|
|
2407
|
+
makePublicLogTag(99, 88, 77),
|
|
2408
|
+
makePublicLogTag(1, 1, 0)
|
|
977
2409
|
];
|
|
978
|
-
const logsByTags = await store.
|
|
2410
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
979
2411
|
expect(logsByTags).toEqual([
|
|
980
2412
|
[],
|
|
981
2413
|
[
|
|
982
2414
|
expect.objectContaining({
|
|
983
2415
|
blockNumber: 1,
|
|
984
|
-
|
|
985
|
-
isFromPublic: false
|
|
2416
|
+
logData: makePublicLog(tags[1]).getEmittedFields()
|
|
986
2417
|
})
|
|
987
2418
|
]
|
|
988
2419
|
]);
|
|
989
2420
|
});
|
|
990
2421
|
});
|
|
991
2422
|
describe('getPublicLogs', ()=>{
|
|
992
|
-
const
|
|
993
|
-
|
|
994
|
-
const
|
|
995
|
-
|
|
996
|
-
|
|
2423
|
+
const numBlocksForPublicLogs = 10;
|
|
2424
|
+
// Helper to get total public logs per tx from a block
|
|
2425
|
+
const getPublicLogsPerTx = (block, txIndex)=>block.body.txEffects[txIndex].publicLogs.length;
|
|
2426
|
+
// Helper to get number of txs in a block
|
|
2427
|
+
const getTxsPerBlock = (block)=>block.body.txEffects.length;
|
|
997
2428
|
beforeEach(async ()=>{
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
attestations: times(3, CommitteeAttestation.random)
|
|
1006
|
-
}));
|
|
1007
|
-
await store.addBlocks(blocks);
|
|
1008
|
-
await store.addLogs(blocks.map((b)=>b.block));
|
|
2429
|
+
// Use the outer publishedCheckpoints for log tests
|
|
2430
|
+
for(let i = 0; i < numBlocksForPublicLogs; i++){
|
|
2431
|
+
await store.addCheckpoints([
|
|
2432
|
+
publishedCheckpoints[i]
|
|
2433
|
+
]);
|
|
2434
|
+
await store.addLogs(publishedCheckpoints[i].checkpoint.blocks);
|
|
2435
|
+
}
|
|
1009
2436
|
});
|
|
1010
2437
|
it('no logs returned if deleted ("txHash" filter param is respected variant)', async ()=>{
|
|
1011
2438
|
// get random tx
|
|
1012
|
-
const targetBlockIndex = randomInt(
|
|
1013
|
-
const
|
|
1014
|
-
const
|
|
2439
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2440
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2441
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2442
|
+
const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
|
|
1015
2443
|
await Promise.all([
|
|
1016
|
-
store.
|
|
1017
|
-
store.deleteLogs(
|
|
2444
|
+
store.unwindCheckpoints(CheckpointNumber(numBlocksForPublicLogs), numBlocksForPublicLogs),
|
|
2445
|
+
store.deleteLogs(publishedCheckpoints.slice(0, numBlocksForPublicLogs).flatMap((b)=>b.checkpoint.blocks))
|
|
1018
2446
|
]);
|
|
1019
2447
|
const response = await store.getPublicLogs({
|
|
1020
2448
|
txHash: targetTxHash
|
|
@@ -1025,15 +2453,16 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1025
2453
|
});
|
|
1026
2454
|
it('"txHash" filter param is respected', async ()=>{
|
|
1027
2455
|
// get random tx
|
|
1028
|
-
const targetBlockIndex = randomInt(
|
|
1029
|
-
const
|
|
1030
|
-
const
|
|
2456
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2457
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2458
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2459
|
+
const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
|
|
1031
2460
|
const response = await store.getPublicLogs({
|
|
1032
2461
|
txHash: targetTxHash
|
|
1033
2462
|
});
|
|
1034
2463
|
const logs = response.logs;
|
|
1035
2464
|
expect(response.maxLogsHit).toBeFalsy();
|
|
1036
|
-
const expectedNumLogs =
|
|
2465
|
+
const expectedNumLogs = getPublicLogsPerTx(targetBlock, targetTxIndex);
|
|
1037
2466
|
expect(logs.length).toEqual(expectedNumLogs);
|
|
1038
2467
|
const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM;
|
|
1039
2468
|
for (const log of logs){
|
|
@@ -1041,6 +2470,16 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1041
2470
|
expect(log.id.txIndex).toEqual(targetTxIndex);
|
|
1042
2471
|
}
|
|
1043
2472
|
});
|
|
2473
|
+
it('returns block hash on public log ids', async ()=>{
|
|
2474
|
+
const targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
2475
|
+
const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2476
|
+
const logs = (await store.getPublicLogs({
|
|
2477
|
+
fromBlock: targetBlock.number,
|
|
2478
|
+
toBlock: targetBlock.number + 1
|
|
2479
|
+
})).logs;
|
|
2480
|
+
expect(logs.length).toBeGreaterThan(0);
|
|
2481
|
+
expect(logs.every((log)=>log.id.blockHash.equals(expectedBlockHash))).toBe(true);
|
|
2482
|
+
});
|
|
1044
2483
|
it('"fromBlock" and "toBlock" filter params are respected', async ()=>{
|
|
1045
2484
|
// Set "fromBlock" and "toBlock"
|
|
1046
2485
|
const fromBlock = 3;
|
|
@@ -1051,7 +2490,12 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1051
2490
|
});
|
|
1052
2491
|
const logs = response.logs;
|
|
1053
2492
|
expect(response.maxLogsHit).toBeFalsy();
|
|
1054
|
-
|
|
2493
|
+
// Compute expected logs from the blocks in range
|
|
2494
|
+
let expectedNumLogs = 0;
|
|
2495
|
+
for(let i = fromBlock - 1; i < toBlock - 1; i++){
|
|
2496
|
+
const block = publishedCheckpoints[i].checkpoint.blocks[0];
|
|
2497
|
+
expectedNumLogs += block.body.txEffects.reduce((sum, tx)=>sum + tx.publicLogs.length, 0);
|
|
2498
|
+
}
|
|
1055
2499
|
expect(logs.length).toEqual(expectedNumLogs);
|
|
1056
2500
|
for (const log of logs){
|
|
1057
2501
|
const blockNumber = log.id.blockNumber;
|
|
@@ -1061,10 +2505,11 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1061
2505
|
});
|
|
1062
2506
|
it('"contractAddress" filter param is respected', async ()=>{
|
|
1063
2507
|
// Get a random contract address from the logs
|
|
1064
|
-
const targetBlockIndex = randomInt(
|
|
1065
|
-
const
|
|
1066
|
-
const
|
|
1067
|
-
const
|
|
2508
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2509
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2510
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2511
|
+
const targetLogIndex = randomInt(getPublicLogsPerTx(targetBlock, targetTxIndex));
|
|
2512
|
+
const targetContractAddress = targetBlock.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
|
|
1068
2513
|
const response = await store.getPublicLogs({
|
|
1069
2514
|
contractAddress: targetContractAddress
|
|
1070
2515
|
});
|
|
@@ -1075,10 +2520,13 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1075
2520
|
});
|
|
1076
2521
|
it('"afterLog" filter param is respected', async ()=>{
|
|
1077
2522
|
// Get a random log as reference
|
|
1078
|
-
const targetBlockIndex = randomInt(
|
|
1079
|
-
const
|
|
1080
|
-
const
|
|
1081
|
-
const
|
|
2523
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2524
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2525
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2526
|
+
const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
|
|
2527
|
+
const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
|
|
2528
|
+
const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2529
|
+
const afterLog = new LogId(BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM), targetBlockHash, targetTxIndex, targetLogIndex);
|
|
1082
2530
|
const response = await store.getPublicLogs({
|
|
1083
2531
|
afterLog
|
|
1084
2532
|
});
|
|
@@ -1098,7 +2546,7 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1098
2546
|
it('"txHash" filter param is ignored when "afterLog" is set', async ()=>{
|
|
1099
2547
|
// Get random txHash
|
|
1100
2548
|
const txHash = TxHash.random();
|
|
1101
|
-
const afterLog = new LogId(BlockNumber(1), 0, 0);
|
|
2549
|
+
const afterLog = new LogId(BlockNumber(1), L2BlockHash.random(), 0, 0);
|
|
1102
2550
|
const response = await store.getPublicLogs({
|
|
1103
2551
|
txHash,
|
|
1104
2552
|
afterLog
|
|
@@ -1140,12 +2588,12 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1140
2588
|
toBlock: +100
|
|
1141
2589
|
})).logs;
|
|
1142
2590
|
blockNumbers = new Set(logs.map((log)=>log.id.blockNumber));
|
|
1143
|
-
expect(blockNumbers.size).toBe(
|
|
2591
|
+
expect(blockNumbers.size).toBe(numBlocksForPublicLogs);
|
|
1144
2592
|
// intersecting with "afterLog" works
|
|
1145
2593
|
logs = (await store.getPublicLogs({
|
|
1146
2594
|
fromBlock: BlockNumber(2),
|
|
1147
2595
|
toBlock: BlockNumber(5),
|
|
1148
|
-
afterLog: new LogId(BlockNumber(4), 0, 0)
|
|
2596
|
+
afterLog: new LogId(BlockNumber(4), L2BlockHash.random(), 0, 0)
|
|
1149
2597
|
})).logs;
|
|
1150
2598
|
blockNumbers = new Set(logs.map((log)=>log.id.blockNumber));
|
|
1151
2599
|
expect(blockNumbers).toEqual(new Set([
|
|
@@ -1153,22 +2601,25 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1153
2601
|
]));
|
|
1154
2602
|
logs = (await store.getPublicLogs({
|
|
1155
2603
|
toBlock: BlockNumber(5),
|
|
1156
|
-
afterLog: new LogId(BlockNumber(5), 1, 0)
|
|
2604
|
+
afterLog: new LogId(BlockNumber(5), L2BlockHash.random(), 1, 0)
|
|
1157
2605
|
})).logs;
|
|
1158
2606
|
expect(logs.length).toBe(0);
|
|
1159
2607
|
logs = (await store.getPublicLogs({
|
|
1160
2608
|
fromBlock: BlockNumber(2),
|
|
1161
2609
|
toBlock: BlockNumber(5),
|
|
1162
|
-
afterLog: new LogId(BlockNumber(100), 0, 0)
|
|
2610
|
+
afterLog: new LogId(BlockNumber(100), L2BlockHash.random(), 0, 0)
|
|
1163
2611
|
})).logs;
|
|
1164
2612
|
expect(logs.length).toBe(0);
|
|
1165
2613
|
});
|
|
1166
2614
|
it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async ()=>{
|
|
1167
2615
|
// Get a random log as reference
|
|
1168
|
-
const targetBlockIndex = randomInt(
|
|
1169
|
-
const
|
|
1170
|
-
const
|
|
1171
|
-
const
|
|
2616
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2617
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2618
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2619
|
+
const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
|
|
2620
|
+
const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
|
|
2621
|
+
const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2622
|
+
const afterLog = new LogId(BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM), targetBlockHash, targetTxIndex, targetLogIndex);
|
|
1172
2623
|
const response = await store.getPublicLogs({
|
|
1173
2624
|
afterLog,
|
|
1174
2625
|
fromBlock: afterLog.blockNumber
|
|
@@ -1187,6 +2638,36 @@ import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
|
1187
2638
|
}
|
|
1188
2639
|
});
|
|
1189
2640
|
});
|
|
2641
|
+
describe('getContractClassLogs', ()=>{
|
|
2642
|
+
let targetBlock;
|
|
2643
|
+
let expectedContractClassLog;
|
|
2644
|
+
beforeEach(async ()=>{
|
|
2645
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
2646
|
+
targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
2647
|
+
expectedContractClassLog = await ContractClassLog.random();
|
|
2648
|
+
targetBlock.body.txEffects.forEach((txEffect, index)=>{
|
|
2649
|
+
txEffect.contractClassLogs = index === 0 ? [
|
|
2650
|
+
expectedContractClassLog
|
|
2651
|
+
] : [];
|
|
2652
|
+
});
|
|
2653
|
+
await store.addLogs([
|
|
2654
|
+
targetBlock
|
|
2655
|
+
]);
|
|
2656
|
+
});
|
|
2657
|
+
it('returns block hash on contract class log ids', async ()=>{
|
|
2658
|
+
const result = await store.getContractClassLogs({
|
|
2659
|
+
fromBlock: targetBlock.number,
|
|
2660
|
+
toBlock: targetBlock.number + 1
|
|
2661
|
+
});
|
|
2662
|
+
expect(result.maxLogsHit).toBeFalsy();
|
|
2663
|
+
expect(result.logs).toHaveLength(1);
|
|
2664
|
+
const [{ id, log }] = result.logs;
|
|
2665
|
+
const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2666
|
+
expect(id.blockHash.equals(expectedBlockHash)).toBe(true);
|
|
2667
|
+
expect(id.blockNumber).toEqual(targetBlock.number);
|
|
2668
|
+
expect(log).toEqual(expectedContractClassLog);
|
|
2669
|
+
});
|
|
2670
|
+
});
|
|
1190
2671
|
describe('pendingChainValidationStatus', ()=>{
|
|
1191
2672
|
it('should return undefined when no status is set', async ()=>{
|
|
1192
2673
|
const status = await store.getPendingChainValidationStatus();
|