@aztec/archiver 4.0.0-nightly.20250907 → 4.0.0-nightly.20260108

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. package/README.md +27 -6
  2. package/dest/archiver/archiver.d.ts +127 -84
  3. package/dest/archiver/archiver.d.ts.map +1 -1
  4. package/dest/archiver/archiver.js +1150 -382
  5. package/dest/archiver/archiver_store.d.ts +122 -45
  6. package/dest/archiver/archiver_store.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  8. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  9. package/dest/archiver/archiver_store_test_suite.js +2013 -343
  10. package/dest/archiver/config.d.ts +7 -20
  11. package/dest/archiver/config.d.ts.map +1 -1
  12. package/dest/archiver/config.js +21 -5
  13. package/dest/archiver/errors.d.ts +25 -1
  14. package/dest/archiver/errors.d.ts.map +1 -1
  15. package/dest/archiver/errors.js +37 -0
  16. package/dest/archiver/index.d.ts +2 -2
  17. package/dest/archiver/index.d.ts.map +1 -1
  18. package/dest/archiver/instrumentation.d.ts +5 -3
  19. package/dest/archiver/instrumentation.d.ts.map +1 -1
  20. package/dest/archiver/instrumentation.js +14 -0
  21. package/dest/archiver/kv_archiver_store/block_store.d.ts +83 -15
  22. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/block_store.js +396 -73
  24. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
  25. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  26. package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
  27. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
  28. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  29. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +51 -55
  30. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  31. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +82 -46
  32. package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
  33. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  34. package/dest/archiver/kv_archiver_store/log_store.js +149 -84
  35. package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
  36. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  37. package/dest/archiver/kv_archiver_store/message_store.js +15 -14
  38. package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
  39. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
  40. package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
  41. package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
  42. package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
  43. package/dest/archiver/l1/calldata_retriever.js +471 -0
  44. package/dest/archiver/l1/data_retrieval.d.ts +90 -0
  45. package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
  46. package/dest/archiver/l1/data_retrieval.js +331 -0
  47. package/dest/archiver/l1/debug_tx.d.ts +19 -0
  48. package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
  49. package/dest/archiver/l1/debug_tx.js +73 -0
  50. package/dest/archiver/l1/spire_proposer.d.ts +70 -0
  51. package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
  52. package/dest/archiver/l1/spire_proposer.js +157 -0
  53. package/dest/archiver/l1/trace_tx.d.ts +97 -0
  54. package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
  55. package/dest/archiver/l1/trace_tx.js +91 -0
  56. package/dest/archiver/l1/types.d.ts +12 -0
  57. package/dest/archiver/l1/types.d.ts.map +1 -0
  58. package/dest/archiver/l1/types.js +3 -0
  59. package/dest/archiver/l1/validate_trace.d.ts +29 -0
  60. package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
  61. package/dest/archiver/l1/validate_trace.js +150 -0
  62. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  63. package/dest/archiver/structs/inbox_message.d.ts +4 -4
  64. package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
  65. package/dest/archiver/structs/inbox_message.js +6 -5
  66. package/dest/archiver/structs/published.d.ts +2 -2
  67. package/dest/archiver/structs/published.d.ts.map +1 -1
  68. package/dest/archiver/validation.d.ts +10 -4
  69. package/dest/archiver/validation.d.ts.map +1 -1
  70. package/dest/archiver/validation.js +66 -44
  71. package/dest/factory.d.ts +4 -6
  72. package/dest/factory.d.ts.map +1 -1
  73. package/dest/factory.js +5 -4
  74. package/dest/index.d.ts +2 -2
  75. package/dest/index.d.ts.map +1 -1
  76. package/dest/index.js +1 -1
  77. package/dest/rpc/index.d.ts +2 -2
  78. package/dest/test/index.d.ts +1 -1
  79. package/dest/test/mock_archiver.d.ts +16 -8
  80. package/dest/test/mock_archiver.d.ts.map +1 -1
  81. package/dest/test/mock_archiver.js +19 -14
  82. package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
  83. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  84. package/dest/test/mock_l1_to_l2_message_source.js +10 -9
  85. package/dest/test/mock_l2_block_source.d.ts +31 -20
  86. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  87. package/dest/test/mock_l2_block_source.js +85 -18
  88. package/dest/test/mock_structs.d.ts +3 -2
  89. package/dest/test/mock_structs.d.ts.map +1 -1
  90. package/dest/test/mock_structs.js +9 -8
  91. package/package.json +18 -17
  92. package/src/archiver/archiver.ts +990 -481
  93. package/src/archiver/archiver_store.ts +141 -44
  94. package/src/archiver/archiver_store_test_suite.ts +2114 -331
  95. package/src/archiver/config.ts +30 -35
  96. package/src/archiver/errors.ts +64 -0
  97. package/src/archiver/index.ts +1 -1
  98. package/src/archiver/instrumentation.ts +19 -2
  99. package/src/archiver/kv_archiver_store/block_store.ts +541 -83
  100. package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
  101. package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
  102. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +107 -67
  103. package/src/archiver/kv_archiver_store/log_store.ts +209 -99
  104. package/src/archiver/kv_archiver_store/message_store.ts +21 -18
  105. package/src/archiver/l1/README.md +98 -0
  106. package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
  107. package/src/archiver/l1/calldata_retriever.ts +641 -0
  108. package/src/archiver/l1/data_retrieval.ts +512 -0
  109. package/src/archiver/l1/debug_tx.ts +99 -0
  110. package/src/archiver/l1/spire_proposer.ts +160 -0
  111. package/src/archiver/l1/trace_tx.ts +128 -0
  112. package/src/archiver/l1/types.ts +13 -0
  113. package/src/archiver/l1/validate_trace.ts +211 -0
  114. package/src/archiver/structs/inbox_message.ts +8 -8
  115. package/src/archiver/structs/published.ts +1 -1
  116. package/src/archiver/validation.ts +86 -32
  117. package/src/factory.ts +6 -7
  118. package/src/index.ts +1 -1
  119. package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
  120. package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
  121. package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
  122. package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
  123. package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
  124. package/src/test/fixtures/trace_transaction-proxied.json +128 -0
  125. package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
  126. package/src/test/mock_archiver.ts +22 -16
  127. package/src/test/mock_l1_to_l2_message_source.ts +10 -9
  128. package/src/test/mock_l2_block_source.ts +114 -27
  129. package/src/test/mock_structs.ts +10 -9
  130. package/dest/archiver/data_retrieval.d.ts +0 -78
  131. package/dest/archiver/data_retrieval.d.ts.map +0 -1
  132. package/dest/archiver/data_retrieval.js +0 -354
  133. package/src/archiver/data_retrieval.ts +0 -535
@@ -1,184 +1,1675 @@
1
1
  import {
2
+ INITIAL_CHECKPOINT_NUMBER,
2
3
  INITIAL_L2_BLOCK_NUM,
4
+ MAX_NOTE_HASHES_PER_TX,
3
5
  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
4
6
  PRIVATE_LOG_SIZE_IN_FIELDS,
5
- PUBLIC_LOG_SIZE_IN_FIELDS,
6
7
  } from '@aztec/constants';
7
8
  import { makeTuple } from '@aztec/foundation/array';
9
+ import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
8
10
  import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
9
11
  import { times, timesParallel } from '@aztec/foundation/collection';
10
- import { randomInt } from '@aztec/foundation/crypto';
11
- import { Fr } from '@aztec/foundation/fields';
12
+ import { randomInt } from '@aztec/foundation/crypto/random';
13
+ import { Fr } from '@aztec/foundation/curves/bn254';
12
14
  import { toArray } from '@aztec/foundation/iterable';
13
15
  import { sleep } from '@aztec/foundation/sleep';
14
16
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
15
- import { CommitteeAttestation, L2Block, L2BlockHash, wrapInBlock } from '@aztec/stdlib/block';
17
+ import {
18
+ CheckpointedL2Block,
19
+ CommitteeAttestation,
20
+ EthAddress,
21
+ L2BlockHash,
22
+ L2BlockNew,
23
+ type ValidateBlockResult,
24
+ randomBlockInfo,
25
+ } from '@aztec/stdlib/block';
26
+ import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
16
27
  import {
17
28
  type ContractClassPublic,
18
29
  type ContractInstanceWithAddress,
19
30
  SerializableContractInstance,
20
31
  computePublicBytecodeCommitment,
21
32
  } from '@aztec/stdlib/contract';
22
- import { LogId, PrivateLog, PublicLog } from '@aztec/stdlib/logs';
33
+ import { ContractClassLog, LogId, PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs';
23
34
  import { InboxLeaf } from '@aztec/stdlib/messaging';
35
+ import { CheckpointHeader } from '@aztec/stdlib/rollup';
24
36
  import {
25
37
  makeContractClassPublic,
26
38
  makeExecutablePrivateFunctionWithMembershipProof,
27
39
  makeUtilityFunctionWithMembershipProof,
28
40
  } from '@aztec/stdlib/testing';
29
41
  import '@aztec/stdlib/testing/jest';
30
- import { type IndexedTxEffect, TxEffect, TxHash } from '@aztec/stdlib/tx';
42
+ import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
43
+ import { type IndexedTxEffect, PartialStateReference, StateReference, TxEffect, TxHash } from '@aztec/stdlib/tx';
31
44
 
32
45
  import { makeInboxMessage, makeInboxMessages } from '../test/mock_structs.js';
33
46
  import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
34
- import { BlockNumberNotSequentialError, InitialBlockNumberNotSequentialError } from './errors.js';
47
+ import {
48
+ BlockArchiveNotConsistentError,
49
+ BlockIndexNotSequentialError,
50
+ BlockNumberNotSequentialError,
51
+ CheckpointNumberNotConsistentError,
52
+ CheckpointNumberNotSequentialError,
53
+ InitialBlockNumberNotSequentialError,
54
+ InitialCheckpointNumberNotSequentialError,
55
+ } from './errors.js';
35
56
  import { MessageStoreError } from './kv_archiver_store/message_store.js';
36
57
  import type { InboxMessage } from './structs/inbox_message.js';
37
- import type { PublishedL2Block } from './structs/published.js';
38
58
 
39
- /**
40
- * @param testName - The name of the test suite.
41
- * @param getStore - Returns an instance of a store that's already been initialized.
42
- */
43
- export function describeArchiverDataStore(
44
- testName: string,
45
- getStore: () => ArchiverDataStore | Promise<ArchiverDataStore>,
46
- ) {
47
- describe(testName, () => {
48
- let store: ArchiverDataStore;
49
- let blocks: PublishedL2Block[];
50
-
51
- const blockTests: [number, number, () => PublishedL2Block[]][] = [
52
- [1, 1, () => blocks.slice(0, 1)],
53
- [10, 1, () => blocks.slice(9, 10)],
54
- [1, 10, () => blocks.slice(0, 10)],
55
- [2, 5, () => blocks.slice(1, 6)],
56
- [5, 2, () => blocks.slice(4, 6)],
57
- ];
59
+ /**
60
+ * @param testName - The name of the test suite.
61
+ * @param getStore - Returns an instance of a store that's already been initialized.
62
+ */
63
+ export function describeArchiverDataStore(
64
+ testName: string,
65
+ getStore: () => ArchiverDataStore | Promise<ArchiverDataStore>,
66
+ ) {
67
+ describe(testName, () => {
68
+ let store: ArchiverDataStore;
69
+ let publishedCheckpoints: PublishedCheckpoint[];
70
+
71
+ const blockNumberTests: [number, () => L2BlockNew][] = [
72
+ [1, () => publishedCheckpoints[0].checkpoint.blocks[0]],
73
+ [10, () => publishedCheckpoints[9].checkpoint.blocks[0]],
74
+ [5, () => publishedCheckpoints[4].checkpoint.blocks[0]],
75
+ ];
76
+
77
+ const makeBlockHash = (blockNumber: number) => `0x${blockNumber.toString(16).padStart(64, '0')}`;
78
+
79
+ // Create a state reference with properly calculated noteHashTree.nextAvailableLeafIndex
80
+ // This is needed because the log store calculates dataStartIndexForBlock as:
81
+ // noteHashTree.nextAvailableLeafIndex - txEffects.length * MAX_NOTE_HASHES_PER_TX
82
+ // If nextAvailableLeafIndex is too small (random values 0-1000), this becomes negative
83
+ const makeStateForBlock = (blockNumber: number, txsPerBlock: number): StateReference => {
84
+ // Ensure nextAvailableLeafIndex is large enough for all blocks up to this point
85
+ const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX;
86
+ return new StateReference(
87
+ AppendOnlyTreeSnapshot.random(),
88
+ new PartialStateReference(
89
+ new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex),
90
+ AppendOnlyTreeSnapshot.random(),
91
+ AppendOnlyTreeSnapshot.random(),
92
+ ),
93
+ );
94
+ };
95
+
96
+ const makePublishedCheckpoint = (checkpoint: Checkpoint, l1BlockNumber: number): PublishedCheckpoint => {
97
+ return new PublishedCheckpoint(
98
+ checkpoint,
99
+ new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)),
100
+ times(3, CommitteeAttestation.random),
101
+ );
102
+ };
103
+
104
+ const expectCheckpointedBlockEquals = (
105
+ actual: CheckpointedL2Block,
106
+ expectedBlock: L2BlockNew,
107
+ expectedCheckpoint: PublishedCheckpoint,
108
+ ) => {
109
+ expect(actual.l1).toEqual(expectedCheckpoint.l1);
110
+ expect(actual.block.header.equals(expectedBlock.header)).toBe(true);
111
+ expect(actual.checkpointNumber).toEqual(expectedCheckpoint.checkpoint.number);
112
+ expect(actual.attestations.every((a, i) => a.equals(expectedCheckpoint.attestations[i]))).toBe(true);
113
+ };
114
+
115
+ beforeEach(async () => {
116
+ store = await getStore();
117
+ // Create checkpoints sequentially to ensure archive roots are chained properly.
118
+ // Each block's header.lastArchive must equal the previous block's archive.
119
+ publishedCheckpoints = [];
120
+ const txsPerBlock = 4;
121
+ for (let i = 0; i < 10; i++) {
122
+ const blockNumber = i + 1;
123
+ const previousArchive = i > 0 ? publishedCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
124
+ const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
125
+ numBlocks: 1,
126
+ startBlockNumber: blockNumber,
127
+ previousArchive,
128
+ txsPerBlock,
129
+ state: makeStateForBlock(blockNumber, txsPerBlock),
130
+ // Ensure each tx has public logs for getPublicLogs tests
131
+ txOptions: { numPublicCallsPerTx: 2, numPublicLogsPerCall: 2 },
132
+ });
133
+ publishedCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
134
+ }
135
+ });
136
+
137
+ describe('addCheckpoints', () => {
138
+ it('returns success when adding checkpoints', async () => {
139
+ await expect(store.addCheckpoints(publishedCheckpoints)).resolves.toBe(true);
140
+ });
141
+
142
+ it('throws on duplicate checkpoints', async () => {
143
+ await store.addCheckpoints(publishedCheckpoints);
144
+ await expect(store.addCheckpoints(publishedCheckpoints)).rejects.toThrow(
145
+ InitialCheckpointNumberNotSequentialError,
146
+ );
147
+ });
148
+
149
+ it('throws an error if the previous block does not exist in the store', async () => {
150
+ const checkpoint = await Checkpoint.random(CheckpointNumber(2), { numBlocks: 1, startBlockNumber: 2 });
151
+ const block = makePublishedCheckpoint(checkpoint, 2);
152
+ await expect(store.addCheckpoints([block])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
153
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
154
+ });
155
+
156
+ it('throws an error if there is a gap in the blocks being added', async () => {
157
+ const checkpoint1 = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 });
158
+ const checkpoint3 = await Checkpoint.random(CheckpointNumber(3), { numBlocks: 1, startBlockNumber: 3 });
159
+ const checkpoints = [makePublishedCheckpoint(checkpoint1, 1), makePublishedCheckpoint(checkpoint3, 3)];
160
+ await expect(store.addCheckpoints(checkpoints)).rejects.toThrow(CheckpointNumberNotSequentialError);
161
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
162
+ });
163
+
164
+ it('throws an error if blocks within a checkpoint are not sequential', async () => {
165
+ // Create a checkpoint with non-sequential block numbers (block 1 and block 3, skipping block 2)
166
+ const block1 = await L2BlockNew.random(BlockNumber(1), { checkpointNumber: CheckpointNumber(1) });
167
+ const block3 = await L2BlockNew.random(BlockNumber(3), { checkpointNumber: CheckpointNumber(1) });
168
+
169
+ const checkpoint = new Checkpoint(
170
+ AppendOnlyTreeSnapshot.random(),
171
+ CheckpointHeader.random(),
172
+ [block1, block3],
173
+ CheckpointNumber(1),
174
+ );
175
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
176
+
177
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockNumberNotSequentialError);
178
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
179
+ });
180
+
181
+ it('throws an error if blocks within a checkpoint do not have sequential indexes', async () => {
182
+ // Create a checkpoint with non-sequential indexes
183
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
184
+ checkpointNumber: CheckpointNumber(1),
185
+ indexWithinCheckpoint: 0,
186
+ });
187
+ const block3 = await L2BlockNew.random(BlockNumber(2), {
188
+ checkpointNumber: CheckpointNumber(1),
189
+ indexWithinCheckpoint: 2,
190
+ });
191
+
192
+ const checkpoint = new Checkpoint(
193
+ AppendOnlyTreeSnapshot.random(),
194
+ CheckpointHeader.random(),
195
+ [block1, block3],
196
+ CheckpointNumber(1),
197
+ );
198
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
199
+
200
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
201
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
202
+ });
203
+
204
+ it('throws an error if blocks within a checkpoint do not start from index 0', async () => {
205
+ // Create a checkpoint whose block indexes start at 1 instead of 0
206
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
207
+ checkpointNumber: CheckpointNumber(1),
208
+ indexWithinCheckpoint: 1,
209
+ });
210
+ const block3 = await L2BlockNew.random(BlockNumber(2), {
211
+ checkpointNumber: CheckpointNumber(1),
212
+ indexWithinCheckpoint: 2,
213
+ });
214
+
215
+ const checkpoint = new Checkpoint(
216
+ AppendOnlyTreeSnapshot.random(),
217
+ CheckpointHeader.random(),
218
+ [block1, block3],
219
+ CheckpointNumber(1),
220
+ );
221
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
222
+
223
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
224
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
225
+ });
226
+
227
+ it('throws an error if block has invalid checkpoint index', async () => {
228
+ // Create a block with an invalid checkpoint index
229
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
230
+ checkpointNumber: CheckpointNumber(1),
231
+ indexWithinCheckpoint: -1,
232
+ });
233
+
234
+ const checkpoint = new Checkpoint(
235
+ AppendOnlyTreeSnapshot.random(),
236
+ CheckpointHeader.random(),
237
+ [block1],
238
+ CheckpointNumber(1),
239
+ );
240
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
241
+
242
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
243
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
244
+ });
245
+
246
+ it('throws an error if checkpoint has invalid initial number', async () => {
247
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
248
+ checkpointNumber: CheckpointNumber(2),
249
+ indexWithinCheckpoint: 0,
250
+ });
251
+
252
+ const checkpoint = new Checkpoint(
253
+ AppendOnlyTreeSnapshot.random(),
254
+ CheckpointHeader.random(),
255
+ [block1],
256
+ CheckpointNumber(2),
257
+ );
258
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
259
+
260
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(
261
+ InitialCheckpointNumberNotSequentialError,
262
+ );
263
+ });
264
+
265
+ it('allows the correct initial checkpoint', async () => {
266
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
267
+ checkpointNumber: CheckpointNumber(1),
268
+ indexWithinCheckpoint: 0,
269
+ });
270
+
271
+ const checkpoint = new Checkpoint(
272
+ AppendOnlyTreeSnapshot.random(),
273
+ CheckpointHeader.random(),
274
+ [block1],
275
+ CheckpointNumber(1),
276
+ );
277
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
278
+
279
+ await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
280
+ });
281
+
282
+ it('throws on duplicate initial checkpoint', async () => {
283
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
284
+ checkpointNumber: CheckpointNumber(1),
285
+ indexWithinCheckpoint: 0,
286
+ });
287
+
288
+ const block2 = await L2BlockNew.random(BlockNumber(1), {
289
+ checkpointNumber: CheckpointNumber(1),
290
+ indexWithinCheckpoint: 0,
291
+ });
292
+
293
+ const checkpoint = new Checkpoint(
294
+ AppendOnlyTreeSnapshot.random(),
295
+ CheckpointHeader.random(),
296
+ [block1],
297
+ CheckpointNumber(1),
298
+ );
299
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
300
+
301
+ const checkpoint2 = new Checkpoint(
302
+ AppendOnlyTreeSnapshot.random(),
303
+ CheckpointHeader.random(),
304
+ [block2],
305
+ CheckpointNumber(1),
306
+ );
307
+ const publishedCheckpoint2 = makePublishedCheckpoint(checkpoint2, 10);
308
+
309
+ await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
310
+ await expect(store.addCheckpoints([publishedCheckpoint2])).rejects.toThrow(
311
+ InitialCheckpointNumberNotSequentialError,
312
+ );
313
+ });
314
+ });
315
+
316
+ describe('unwindCheckpoints', () => {
317
+ it('unwinding checkpoints will remove checkpoints from the chain', async () => {
318
+ await store.addCheckpoints(publishedCheckpoints);
319
+ const checkpointNumber = await store.getSynchedCheckpointNumber();
320
+ const lastCheckpoint = publishedCheckpoints.at(-1)!;
321
+ const lastBlockNumber = lastCheckpoint.checkpoint.blocks[0].number;
322
+
323
+ // Verify block exists before unwinding
324
+ const retrievedBlock = await store.getCheckpointedBlock(lastBlockNumber);
325
+ expect(retrievedBlock).toBeDefined();
326
+ expect(retrievedBlock!.block.header.equals(lastCheckpoint.checkpoint.blocks[0].header)).toBe(true);
327
+ expect(retrievedBlock!.checkpointNumber).toEqual(checkpointNumber);
328
+
329
+ await store.unwindCheckpoints(checkpointNumber, 1);
330
+
331
+ expect(await store.getSynchedCheckpointNumber()).toBe(checkpointNumber - 1);
332
+ await expect(store.getCheckpointedBlock(lastBlockNumber)).resolves.toBeUndefined();
333
+ });
334
+
335
+ it('can unwind multiple empty blocks', async () => {
336
+ // Create checkpoints sequentially to chain archive roots
337
+ const emptyCheckpoints: PublishedCheckpoint[] = [];
338
+ for (let i = 0; i < 10; i++) {
339
+ const previousArchive = i > 0 ? emptyCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
340
+ const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
341
+ numBlocks: 1,
342
+ startBlockNumber: i + 1,
343
+ txsPerBlock: 0,
344
+ previousArchive,
345
+ });
346
+ emptyCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
347
+ }
348
+ await store.addCheckpoints(emptyCheckpoints);
349
+ expect(await store.getSynchedCheckpointNumber()).toBe(10);
350
+
351
+ await store.unwindCheckpoints(CheckpointNumber(10), 3);
352
+ expect(await store.getSynchedCheckpointNumber()).toBe(7);
353
+ expect((await store.getRangeOfCheckpoints(CheckpointNumber(1), 10)).map(b => b.checkpointNumber)).toEqual([
354
+ 1, 2, 3, 4, 5, 6, 7,
355
+ ]);
356
+ });
357
+
358
+ it('refuses to unwind checkpoints if the tip is not the last checkpoint', async () => {
359
+ await store.addCheckpoints(publishedCheckpoints);
360
+ await expect(store.unwindCheckpoints(CheckpointNumber(5), 1)).rejects.toThrow(
361
+ /can only unwind checkpoints from the tip/i,
362
+ );
363
+ });
364
+
365
+ it('unwound blocks and headers cannot be retrieved by hash or archive', async () => {
366
+ await store.addCheckpoints(publishedCheckpoints);
367
+ const lastCheckpoint = publishedCheckpoints[publishedCheckpoints.length - 1];
368
+ const lastBlock = lastCheckpoint.checkpoint.blocks[0];
369
+ const blockHash = await lastBlock.header.hash();
370
+ const archive = lastBlock.archive.root;
371
+
372
+ // Verify block and header exist before unwinding
373
+ const retrievedByHash = await store.getCheckpointedBlockByHash(blockHash);
374
+ expect(retrievedByHash).toBeDefined();
375
+ expect(retrievedByHash!.block.header.equals(lastBlock.header)).toBe(true);
376
+
377
+ const retrievedByArchive = await store.getCheckpointedBlockByArchive(archive);
378
+ expect(retrievedByArchive).toBeDefined();
379
+ expect(retrievedByArchive!.block.header.equals(lastBlock.header)).toBe(true);
380
+
381
+ const headerByHash = await store.getBlockHeaderByHash(blockHash);
382
+ expect(headerByHash).toBeDefined();
383
+ expect(headerByHash!.equals(lastBlock.header)).toBe(true);
384
+
385
+ const headerByArchive = await store.getBlockHeaderByArchive(archive);
386
+ expect(headerByArchive).toBeDefined();
387
+ expect(headerByArchive!.equals(lastBlock.header)).toBe(true);
388
+
389
+ // Unwind the checkpoint
390
+ await store.unwindCheckpoints(lastCheckpoint.checkpoint.number, 1);
391
+
392
+ // Verify neither block nor header can be retrieved after unwinding
393
+ expect(await store.getCheckpointedBlockByHash(blockHash)).toBeUndefined();
394
+ expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
395
+ expect(await store.getBlockHeaderByHash(blockHash)).toBeUndefined();
396
+ expect(await store.getBlockHeaderByArchive(archive)).toBeUndefined();
397
+ });
398
+ });
399
+
400
+ describe('multi-block checkpoints', () => {
401
+ it('block number increases correctly when adding checkpoints with multiple blocks', async () => {
402
+ // Create 3 checkpoints: first with 2 blocks, second with 3 blocks, third with 1 block
403
+ // Total blocks: 6, spanning block numbers 1-6
404
+ // Chain archive roots across checkpoints
405
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
406
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
407
+
408
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
409
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
410
+ numBlocks: 3,
411
+ startBlockNumber: 3,
412
+ previousArchive: previousArchive1,
413
+ });
414
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
415
+
416
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
417
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
418
+ numBlocks: 1,
419
+ startBlockNumber: 6,
420
+ previousArchive: previousArchive2,
421
+ });
422
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
423
+
424
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
425
+
426
+ // Checkpoint number should be 3 (the last checkpoint number)
427
+ expect(await store.getSynchedCheckpointNumber()).toBe(3);
428
+ // Block number should be 6 (the last block number across all checkpoints)
429
+ expect(await store.getLatestBlockNumber()).toBe(6);
430
+ });
431
+
432
+ it('block number decreases correctly when unwinding checkpoints with multiple blocks', async () => {
433
+ // Create 3 checkpoints with varying block counts, chaining archive roots
434
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
435
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
436
+
437
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
438
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
439
+ numBlocks: 3,
440
+ startBlockNumber: 3,
441
+ previousArchive: previousArchive1,
442
+ });
443
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
444
+
445
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
446
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
447
+ numBlocks: 2,
448
+ startBlockNumber: 6,
449
+ previousArchive: previousArchive2,
450
+ });
451
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
452
+
453
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
454
+
455
+ expect(await store.getSynchedCheckpointNumber()).toBe(3);
456
+ expect(await store.getLatestBlockNumber()).toBe(7);
457
+
458
+ // Unwind the last checkpoint (which has 2 blocks)
459
+ await store.unwindCheckpoints(CheckpointNumber(3), 1);
460
+
461
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
462
+ expect(await store.getLatestBlockNumber()).toBe(5);
463
+
464
+ // Unwind another checkpoint (which has 3 blocks)
465
+ await store.unwindCheckpoints(CheckpointNumber(2), 1);
466
+
467
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
468
+ expect(await store.getLatestBlockNumber()).toBe(2);
469
+ });
470
+
471
+ it('unwinding multiple checkpoints with multiple blocks in one go', async () => {
472
+ // Create 4 checkpoints with varying block counts, chaining archive roots
473
+ // Checkpoint 1: blocks 1-2 (2 blocks)
474
+ // Checkpoint 2: blocks 3-5 (3 blocks)
475
+ // Checkpoint 3: blocks 6-7 (2 blocks)
476
+ // Checkpoint 4: blocks 8-10 (3 blocks)
477
+ // Total: 10 blocks across 4 checkpoints
478
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
479
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
480
+
481
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
482
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
483
+ numBlocks: 3,
484
+ startBlockNumber: 3,
485
+ previousArchive: previousArchive1,
486
+ });
487
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
488
+
489
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
490
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
491
+ numBlocks: 2,
492
+ startBlockNumber: 6,
493
+ previousArchive: previousArchive2,
494
+ });
495
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
496
+
497
+ const previousArchive3 = checkpoint3Cp.blocks.at(-1)!.archive;
498
+ const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
499
+ numBlocks: 3,
500
+ startBlockNumber: 8,
501
+ previousArchive: previousArchive3,
502
+ });
503
+ const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
504
+
505
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3, checkpoint4]);
506
+
507
+ expect(await store.getSynchedCheckpointNumber()).toBe(4);
508
+ expect(await store.getLatestBlockNumber()).toBe(10);
509
+
510
+ // Unwind 2 checkpoints at once (checkpoints 3 and 4, which together have 5 blocks)
511
+ await store.unwindCheckpoints(CheckpointNumber(4), 2);
512
+
513
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
514
+ expect(await store.getLatestBlockNumber()).toBe(5);
515
+
516
+ // Verify blocks 1-5 still exist (from checkpoints 1 and 2)
517
+ for (let blockNumber = 1; blockNumber <= 5; blockNumber++) {
518
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
519
+ }
520
+
521
+ // Verify blocks 6-10 are gone (from checkpoints 3 and 4)
522
+ for (let blockNumber = 6; blockNumber <= 10; blockNumber++) {
523
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
524
+ }
525
+
526
+ // Unwind remaining 2 checkpoints at once (checkpoints 1 and 2, which together have 5 blocks)
527
+ await store.unwindCheckpoints(CheckpointNumber(2), 2);
528
+
529
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
530
+ expect(await store.getLatestBlockNumber()).toBe(0);
531
+
532
+ // Verify all blocks are gone
533
+ for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
534
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
535
+ }
536
+ });
537
+
538
+ it('getCheckpointedBlock returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
539
+ // Create checkpoints with chained archive roots
540
+ // Create a checkpoint with 3 blocks
541
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 });
542
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
543
+
544
+ // Create another checkpoint with 2 blocks
545
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
546
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
547
+ numBlocks: 2,
548
+ startBlockNumber: 4,
549
+ previousArchive: previousArchive1,
550
+ });
551
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
552
+
553
+ await store.addCheckpoints([checkpoint1, checkpoint2]);
554
+
555
+ // Check blocks from the first checkpoint (blocks 1, 2, 3)
556
+ for (let i = 0; i < 3; i++) {
557
+ const blockNumber = i + 1;
558
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
559
+
560
+ expect(retrievedBlock).toBeDefined();
561
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
562
+ expect(retrievedBlock!.block.number).toBe(blockNumber);
563
+ expect(retrievedBlock!.l1).toEqual(checkpoint1.l1);
564
+ expect(retrievedBlock!.attestations.every((a, j) => a.equals(checkpoint1.attestations[j]))).toBe(true);
565
+ }
566
+
567
+ // Check blocks from the second checkpoint (blocks 4, 5)
568
+ for (let i = 0; i < 2; i++) {
569
+ const blockNumber = i + 4;
570
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
571
+
572
+ expect(retrievedBlock).toBeDefined();
573
+ expect(retrievedBlock!.checkpointNumber).toBe(2);
574
+ expect(retrievedBlock!.block.number).toBe(blockNumber);
575
+ expect(retrievedBlock!.l1).toEqual(checkpoint2.l1);
576
+ expect(retrievedBlock!.attestations.every((a, j) => a.equals(checkpoint2.attestations[j]))).toBe(true);
577
+ }
578
+ });
579
+
580
+ it('getCheckpointedBlockByHash returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
581
+ const checkpoint = makePublishedCheckpoint(
582
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
583
+ 10,
584
+ );
585
+
586
+ await store.addCheckpoints([checkpoint]);
587
+
588
+ // Check each block by its hash
589
+ for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) {
590
+ const block = checkpoint.checkpoint.blocks[i];
591
+ const blockHash = await block.header.hash();
592
+ const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
593
+
594
+ expect(retrievedBlock).toBeDefined();
595
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
596
+ expect(retrievedBlock!.block.number).toBe(i + 1);
597
+ expect(retrievedBlock!.l1).toEqual(checkpoint.l1);
598
+ }
599
+ });
600
+
601
+ it('getCheckpointedBlockByArchive returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
602
+ const checkpoint = makePublishedCheckpoint(
603
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
604
+ 10,
605
+ );
606
+
607
+ await store.addCheckpoints([checkpoint]);
608
+
609
+ // Check each block by its archive root
610
+ for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) {
611
+ const block = checkpoint.checkpoint.blocks[i];
612
+ const archive = block.archive.root;
613
+ const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
614
+
615
+ expect(retrievedBlock).toBeDefined();
616
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
617
+ expect(retrievedBlock!.block.number).toBe(i + 1);
618
+ expect(retrievedBlock!.l1).toEqual(checkpoint.l1);
619
+ }
620
+ });
621
+
622
+ it('unwinding a multi-block checkpoint removes all its blocks', async () => {
623
+ const checkpoint = makePublishedCheckpoint(
624
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
625
+ 10,
626
+ );
627
+
628
+ await store.addCheckpoints([checkpoint]);
629
+
630
+ // Verify all 3 blocks exist
631
+ for (let blockNumber = 1; blockNumber <= 3; blockNumber++) {
632
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
633
+ }
634
+
635
+ // Unwind the checkpoint
636
+ await store.unwindCheckpoints(CheckpointNumber(1), 1);
637
+
638
+ // Verify all 3 blocks are removed
639
+ for (let blockNumber = 1; blockNumber <= 3; blockNumber++) {
640
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
641
+ }
642
+
643
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
644
+ expect(await store.getLatestBlockNumber()).toBe(0);
645
+ });
646
+ });
647
+
648
+ describe('uncheckpointed blocks', () => {
649
+ it('can add blocks independently before a checkpoint arrives', async () => {
650
+ // First, establish some checkpointed blocks (checkpoint 1 with blocks 1-3)
651
+ const checkpoint1 = makePublishedCheckpoint(
652
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
653
+ 10,
654
+ );
655
+ await store.addCheckpoints([checkpoint1]);
656
+
657
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
658
+ expect(await store.getLatestBlockNumber()).toBe(3);
659
+
660
+ // Now add blocks 4, 5, 6 independently (without a checkpoint) for upcoming checkpoint 2
661
+ // Chain archive roots from the last block of checkpoint 1
662
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
663
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
664
+ checkpointNumber: CheckpointNumber(2),
665
+ indexWithinCheckpoint: 0,
666
+ lastArchive: lastBlockArchive,
667
+ });
668
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
669
+ checkpointNumber: CheckpointNumber(2),
670
+ indexWithinCheckpoint: 1,
671
+ lastArchive: block4.archive,
672
+ });
673
+ const block6 = await L2BlockNew.random(BlockNumber(6), {
674
+ checkpointNumber: CheckpointNumber(2),
675
+ indexWithinCheckpoint: 2,
676
+ lastArchive: block5.archive,
677
+ });
678
+
679
+ await store.addBlocks([block4, block5, block6]);
680
+
681
+ // Checkpoint number should still be 1 (no new checkpoint added)
682
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
683
+ // But latest block number should be 6
684
+ expect(await store.getLatestBlockNumber()).toBe(6);
685
+ });
686
+
687
+ it('getBlock retrieves uncheckpointed blocks', async () => {
688
+ // First, establish some checkpointed blocks
689
+ const checkpoint1 = makePublishedCheckpoint(
690
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
691
+ 10,
692
+ );
693
+ await store.addCheckpoints([checkpoint1]);
694
+
695
+ // Add uncheckpointed blocks for upcoming checkpoint 2, chaining archive roots
696
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
697
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
698
+ checkpointNumber: CheckpointNumber(2),
699
+ indexWithinCheckpoint: 0,
700
+ lastArchive: lastBlockArchive,
701
+ });
702
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
703
+ checkpointNumber: CheckpointNumber(2),
704
+ indexWithinCheckpoint: 1,
705
+ lastArchive: block3.archive,
706
+ });
707
+ await store.addBlocks([block3, block4]);
708
+
709
+ // getBlock should work for both checkpointed and uncheckpointed blocks
710
+ expect((await store.getBlock(1))?.number).toBe(1);
711
+ expect((await store.getBlock(2))?.number).toBe(2);
712
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
713
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
714
+ expect(await store.getBlock(5)).toBeUndefined();
715
+
716
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
717
+ checkpointNumber: CheckpointNumber(2),
718
+ indexWithinCheckpoint: 2,
719
+ lastArchive: block4.archive,
720
+ });
721
+ await store.addBlocks([block5]);
722
+
723
+ // Verify the uncheckpointed blocks have correct data
724
+ const retrieved3 = await store.getBlock(3);
725
+ expect(retrieved3!.number).toBe(3);
726
+ expect(retrieved3!.equals(block3)).toBe(true);
727
+ const retrieved4 = await store.getBlock(4);
728
+ expect(retrieved4!.number).toBe(4);
729
+ expect(retrieved4!.equals(block4)).toBe(true);
730
+ const retrieved5 = await store.getBlock(5);
731
+ expect(retrieved5!.number).toBe(5);
732
+ expect(retrieved5!.equals(block5)).toBe(true);
733
+ });
734
+
735
+ it('getBlockByHash retrieves uncheckpointed blocks', async () => {
736
+ // Add uncheckpointed blocks (no checkpoints at all) for initial checkpoint 1, chaining archive roots
737
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
738
+ checkpointNumber: CheckpointNumber(1),
739
+ indexWithinCheckpoint: 0,
740
+ });
741
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
742
+ checkpointNumber: CheckpointNumber(1),
743
+ indexWithinCheckpoint: 1,
744
+ lastArchive: block1.archive,
745
+ });
746
+ await store.addBlocks([block1, block2]);
747
+
748
+ // getBlockByHash should work for uncheckpointed blocks
749
+ const hash1 = await block1.header.hash();
750
+ const hash2 = await block2.header.hash();
751
+
752
+ const retrieved1 = await store.getBlockByHash(hash1);
753
+ expect(retrieved1!.equals(block1)).toBe(true);
754
+
755
+ const retrieved2 = await store.getBlockByHash(hash2);
756
+ expect(retrieved2!.equals(block2)).toBe(true);
757
+ });
758
+
759
+ it('getBlockByArchive retrieves uncheckpointed blocks', async () => {
760
+ // Add uncheckpointed blocks for initial checkpoint 1, chaining archive roots
761
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
762
+ checkpointNumber: CheckpointNumber(1),
763
+ indexWithinCheckpoint: 0,
764
+ });
765
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
766
+ checkpointNumber: CheckpointNumber(1),
767
+ indexWithinCheckpoint: 1,
768
+ lastArchive: block1.archive,
769
+ });
770
+ await store.addBlocks([block1, block2]);
771
+
772
+ // getBlockByArchive should work for uncheckpointed blocks
773
+ const archive1 = block1.archive.root;
774
+ const archive2 = block2.archive.root;
775
+
776
+ const retrieved1 = await store.getBlockByArchive(archive1);
777
+ expect(retrieved1!.equals(block1)).toBe(true);
778
+
779
+ const retrieved2 = await store.getBlockByArchive(archive2);
780
+ expect(retrieved2!.equals(block2)).toBe(true);
781
+ });
782
+
783
+ it('getCheckpointedBlock returns undefined for uncheckpointed blocks', async () => {
784
+ // Add a checkpoint with blocks 1-2
785
+ const checkpoint1 = makePublishedCheckpoint(
786
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
787
+ 10,
788
+ );
789
+ await store.addCheckpoints([checkpoint1]);
790
+
791
+ // Add uncheckpointed blocks 3-4 for upcoming checkpoint 2, chaining archive roots
792
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
793
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
794
+ checkpointNumber: CheckpointNumber(2),
795
+ indexWithinCheckpoint: 0,
796
+ lastArchive: lastBlockArchive,
797
+ });
798
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
799
+ checkpointNumber: CheckpointNumber(2),
800
+ indexWithinCheckpoint: 1,
801
+ lastArchive: block3.archive,
802
+ });
803
+ await store.addBlocks([block3, block4]);
804
+
805
+ // getCheckpointedBlock should work for checkpointed blocks
806
+ expect((await store.getCheckpointedBlock(1))?.block.number).toBe(1);
807
+ expect((await store.getCheckpointedBlock(2))?.block.number).toBe(2);
808
+
809
+ // getCheckpointedBlock should return undefined for uncheckpointed blocks
810
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
811
+ expect(await store.getCheckpointedBlock(4)).toBeUndefined();
812
+
813
+ // But getBlock should work for all blocks
814
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
815
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
816
+ });
817
+
818
+ it('getCheckpointedBlockByHash returns undefined for uncheckpointed blocks', async () => {
819
+ // Add uncheckpointed blocks for initial checkpoint 1
820
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
821
+ checkpointNumber: CheckpointNumber(1),
822
+ indexWithinCheckpoint: 0,
823
+ });
824
+ await store.addBlocks([block1]);
825
+
826
+ const hash = await block1.header.hash();
827
+
828
+ // getCheckpointedBlockByHash should return undefined
829
+ expect(await store.getCheckpointedBlockByHash(hash)).toBeUndefined();
830
+
831
+ // But getBlockByHash should work
832
+ expect((await store.getBlockByHash(hash))?.equals(block1)).toBe(true);
833
+ });
834
+
835
+ it('getCheckpointedBlockByArchive returns undefined for uncheckpointed blocks', async () => {
836
+ // Add uncheckpointed blocks for initial checkpoint 1
837
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
838
+ checkpointNumber: CheckpointNumber(1),
839
+ indexWithinCheckpoint: 0,
840
+ });
841
+ await store.addBlocks([block1]);
842
+
843
+ const archive = block1.archive.root;
844
+
845
+ // getCheckpointedBlockByArchive should return undefined
846
+ expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
847
+
848
+ // But getBlockByArchive should work
849
+ expect((await store.getBlockByArchive(archive))?.equals(block1)).toBe(true);
850
+ });
851
+
852
+ it('checkpoint adopts previously added uncheckpointed blocks', async () => {
853
+ // Add blocks 1-3 without a checkpoint (for initial checkpoint 1), chaining archive roots
854
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
855
+ checkpointNumber: CheckpointNumber(1),
856
+ indexWithinCheckpoint: 0,
857
+ });
858
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
859
+ checkpointNumber: CheckpointNumber(1),
860
+ indexWithinCheckpoint: 1,
861
+ lastArchive: block1.archive,
862
+ });
863
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
864
+ checkpointNumber: CheckpointNumber(1),
865
+ indexWithinCheckpoint: 2,
866
+ lastArchive: block2.archive,
867
+ });
868
+ await store.addBlocks([block1, block2, block3]);
869
+
870
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
871
+ expect(await store.getLatestBlockNumber()).toBe(3);
872
+
873
+ // getCheckpointedBlock should return undefined for all
874
+ expect(await store.getCheckpointedBlock(1)).toBeUndefined();
875
+ expect(await store.getCheckpointedBlock(2)).toBeUndefined();
876
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
877
+
878
+ // Now add a checkpoint that covers blocks 1-3
879
+ const checkpoint1 = makePublishedCheckpoint(
880
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
881
+ 10,
882
+ );
883
+ await store.addCheckpoints([checkpoint1]);
884
+
885
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
886
+ expect(await store.getLatestBlockNumber()).toBe(3);
887
+
888
+ // Now getCheckpointedBlock should work for all blocks
889
+ const checkpointed1 = await store.getCheckpointedBlock(1);
890
+ expect(checkpointed1).toBeDefined();
891
+ expect(checkpointed1!.checkpointNumber).toBe(1);
892
+ expect(checkpointed1!.l1).toEqual(checkpoint1.l1);
893
+
894
+ const checkpointed2 = await store.getCheckpointedBlock(2);
895
+ expect(checkpointed2).toBeDefined();
896
+ expect(checkpointed2!.checkpointNumber).toBe(1);
897
+
898
+ const checkpointed3 = await store.getCheckpointedBlock(3);
899
+ expect(checkpointed3).toBeDefined();
900
+ expect(checkpointed3!.checkpointNumber).toBe(1);
901
+ });
902
+
903
+ it('can add more uncheckpointed blocks after a checkpoint and then checkpoint them', async () => {
904
+ // Start with checkpoint 1 covering blocks 1-2
905
+ const checkpoint1 = makePublishedCheckpoint(
906
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
907
+ 10,
908
+ );
909
+ await store.addCheckpoints([checkpoint1]);
910
+
911
+ // Add uncheckpointed blocks 3-5 for the upcoming checkpoint 2, chaining archive roots
912
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
913
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
914
+ checkpointNumber: CheckpointNumber(2),
915
+ indexWithinCheckpoint: 0,
916
+ lastArchive: lastBlockArchive,
917
+ });
918
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
919
+ checkpointNumber: CheckpointNumber(2),
920
+ indexWithinCheckpoint: 1,
921
+ lastArchive: block3.archive,
922
+ });
923
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
924
+ checkpointNumber: CheckpointNumber(2),
925
+ indexWithinCheckpoint: 2,
926
+ lastArchive: block4.archive,
927
+ });
928
+ await store.addBlocks([block3, block4, block5]);
929
+
930
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
931
+ expect(await store.getLatestBlockNumber()).toBe(5);
932
+
933
+ // Blocks 3-5 are not checkpointed yet
934
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
935
+ expect(await store.getCheckpointedBlock(4)).toBeUndefined();
936
+ expect(await store.getCheckpointedBlock(5)).toBeUndefined();
937
+
938
+ // Add checkpoint 2 covering blocks 3-5, chaining from checkpoint1
939
+ const checkpoint2 = makePublishedCheckpoint(
940
+ await Checkpoint.random(CheckpointNumber(2), {
941
+ numBlocks: 3,
942
+ startBlockNumber: 3,
943
+ previousArchive: lastBlockArchive,
944
+ }),
945
+ 11,
946
+ );
947
+ await store.addCheckpoints([checkpoint2]);
948
+
949
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
950
+ expect(await store.getLatestBlockNumber()).toBe(5);
951
+
952
+ // Now blocks 3-5 should be checkpointed with checkpoint 2's info
953
+ const checkpointed3 = await store.getCheckpointedBlock(3);
954
+ expect(checkpointed3).toBeDefined();
955
+ expect(checkpointed3!.checkpointNumber).toBe(2);
956
+ expect(checkpointed3!.l1).toEqual(checkpoint2.l1);
957
+
958
+ const checkpointed4 = await store.getCheckpointedBlock(4);
959
+ expect(checkpointed4).toBeDefined();
960
+ expect(checkpointed4!.checkpointNumber).toBe(2);
961
+
962
+ const checkpointed5 = await store.getCheckpointedBlock(5);
963
+ expect(checkpointed5).toBeDefined();
964
+ expect(checkpointed5!.checkpointNumber).toBe(2);
965
+ });
966
+
967
+ it('getBlocks retrieves both checkpointed and uncheckpointed blocks', async () => {
968
+ // Add checkpoint with blocks 1-2
969
+ const checkpoint1 = makePublishedCheckpoint(
970
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
971
+ 10,
972
+ );
973
+ await store.addCheckpoints([checkpoint1]);
974
+
975
+ // Add uncheckpointed blocks 3-4 for the upcoming checkpoint 2, chaining archive roots
976
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
977
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
978
+ checkpointNumber: CheckpointNumber(2),
979
+ indexWithinCheckpoint: 0,
980
+ lastArchive: lastBlockArchive,
981
+ });
982
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
983
+ checkpointNumber: CheckpointNumber(2),
984
+ indexWithinCheckpoint: 1,
985
+ lastArchive: block3.archive,
986
+ });
987
+ await store.addBlocks([block3, block4]);
988
+
989
+ // getBlocks should retrieve all blocks
990
+ const allBlocks = await store.getBlocks(1, 10);
991
+ expect(allBlocks.length).toBe(4);
992
+ expect(allBlocks.map(b => b.number)).toEqual([1, 2, 3, 4]);
993
+ });
994
+ });
995
+
996
+ describe('addBlocks validation', () => {
997
+ it('throws if blocks have different checkpoint numbers', async () => {
998
+ // First, establish checkpoint 1 with blocks 1-2
999
+ const checkpoint1 = makePublishedCheckpoint(
1000
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1001
+ 10,
1002
+ );
1003
+ await store.addCheckpoints([checkpoint1]);
1004
+
1005
+ // Try to add blocks 3 and 4 with different checkpoint numbers
1006
+ // Chain archives correctly to test the checkpoint number validation
1007
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1008
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1009
+ checkpointNumber: CheckpointNumber(2),
1010
+ indexWithinCheckpoint: 0,
1011
+ lastArchive: lastBlockArchive,
1012
+ });
1013
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1014
+ checkpointNumber: CheckpointNumber(3),
1015
+ indexWithinCheckpoint: 1,
1016
+ lastArchive: block3.archive,
1017
+ });
1018
+
1019
+ await expect(store.addBlocks([block3, block4])).rejects.toThrow(CheckpointNumberNotConsistentError);
1020
+ });
1021
+
1022
+ it('throws if checkpoint number is not the current checkpoint', async () => {
1023
+ // First, establish checkpoint 1 with blocks 1-2
1024
+ const checkpoint1 = makePublishedCheckpoint(
1025
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1026
+ 10,
1027
+ );
1028
+ await store.addCheckpoints([checkpoint1]);
1029
+
1030
+ // Try to add blocks for checkpoint 3 (skipping checkpoint 2)
1031
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1032
+ checkpointNumber: CheckpointNumber(3),
1033
+ indexWithinCheckpoint: 0,
1034
+ });
1035
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1036
+ checkpointNumber: CheckpointNumber(3),
1037
+ indexWithinCheckpoint: 1,
1038
+ });
1039
+
1040
+ await expect(store.addBlocks([block3, block4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1041
+ });
1042
+
1043
+ it('allows blocks with the same checkpoint number for the current checkpoint', async () => {
1044
+ // First, establish checkpoint 1 with blocks 1-2
1045
+ const checkpoint1 = makePublishedCheckpoint(
1046
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1047
+ 10,
1048
+ );
1049
+ await store.addCheckpoints([checkpoint1]);
1050
+
1051
+ // Add blocks 3 and 4 with consistent checkpoint number (2), chaining archive roots
1052
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1053
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1054
+ checkpointNumber: CheckpointNumber(2),
1055
+ indexWithinCheckpoint: 0,
1056
+ lastArchive: lastBlockArchive,
1057
+ });
1058
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1059
+ checkpointNumber: CheckpointNumber(2),
1060
+ indexWithinCheckpoint: 1,
1061
+ lastArchive: block3.archive,
1062
+ });
1063
+
1064
+ await expect(store.addBlocks([block3, block4])).resolves.toBe(true);
1065
+
1066
+ // Verify blocks were added
1067
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
1068
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
1069
+ });
1070
+
1071
+ it('allows blocks for the initial checkpoint when store is empty', async () => {
1072
+ // Add blocks for the initial checkpoint (1), chaining archive roots
1073
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1074
+ checkpointNumber: CheckpointNumber(1),
1075
+ indexWithinCheckpoint: 0,
1076
+ });
1077
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
1078
+ checkpointNumber: CheckpointNumber(1),
1079
+ indexWithinCheckpoint: 1,
1080
+ lastArchive: block1.archive,
1081
+ });
1082
+
1083
+ await expect(store.addBlocks([block1, block2])).resolves.toBe(true);
1084
+
1085
+ // Verify blocks were added
1086
+ expect((await store.getBlock(1))?.equals(block1)).toBe(true);
1087
+ expect((await store.getBlock(2))?.equals(block2)).toBe(true);
1088
+ expect(await store.getLatestBlockNumber()).toBe(2);
1089
+ });
1090
+
1091
+ it('throws if initial block is duplicated across calls', async () => {
1092
+ // Add blocks for the initial checkpoint (1)
1093
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1094
+ checkpointNumber: CheckpointNumber(1),
1095
+ indexWithinCheckpoint: 0,
1096
+ });
1097
+ const block2 = await L2BlockNew.random(BlockNumber(1), {
1098
+ checkpointNumber: CheckpointNumber(1),
1099
+ indexWithinCheckpoint: 0,
1100
+ });
1101
+
1102
+ await expect(store.addBlocks([block1])).resolves.toBe(true);
1103
+ await expect(store.addBlocks([block2])).rejects.toThrow(InitialBlockNumberNotSequentialError);
1104
+ });
1105
+
1106
+ it('throws if first block has wrong checkpoint number when store is empty', async () => {
1107
+ // Try to add blocks for checkpoint 2 when store is empty (should start at 1)
1108
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1109
+ checkpointNumber: CheckpointNumber(2),
1110
+ indexWithinCheckpoint: 0,
1111
+ });
1112
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
1113
+ checkpointNumber: CheckpointNumber(2),
1114
+ indexWithinCheckpoint: 1,
1115
+ });
1116
+
1117
+ await expect(store.addBlocks([block1, block2])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1118
+ });
1119
+
1120
+ it('allows adding more blocks to the same checkpoint in separate calls', async () => {
1121
+ // First, establish checkpoint 1 with blocks 1-2
1122
+ const checkpoint1 = makePublishedCheckpoint(
1123
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1124
+ 10,
1125
+ );
1126
+ await store.addCheckpoints([checkpoint1]);
1127
+
1128
+ // Add block 3 for checkpoint 2, chaining archive roots
1129
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1130
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1131
+ checkpointNumber: CheckpointNumber(2),
1132
+ indexWithinCheckpoint: 0,
1133
+ lastArchive: lastBlockArchive,
1134
+ });
1135
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1136
+
1137
+ // Add block 4 for the same checkpoint 2 in a separate call
1138
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1139
+ checkpointNumber: CheckpointNumber(2),
1140
+ indexWithinCheckpoint: 1,
1141
+ lastArchive: block3.archive,
1142
+ });
1143
+ await expect(store.addBlocks([block4])).resolves.toBe(true);
1144
+
1145
+ expect(await store.getLatestBlockNumber()).toBe(4);
1146
+ });
1147
+
1148
+ it('throws if adding blocks in separate calls with non-consecutive indexes', async () => {
1149
+ // First, establish checkpoint 1 with blocks 1-2
1150
+ const checkpoint1 = makePublishedCheckpoint(
1151
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1152
+ 10,
1153
+ );
1154
+ await store.addCheckpoints([checkpoint1]);
1155
+
1156
+ // Add block 3 for checkpoint 2, chaining archive roots
1157
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1158
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1159
+ checkpointNumber: CheckpointNumber(2),
1160
+ indexWithinCheckpoint: 0,
1161
+ lastArchive: lastBlockArchive,
1162
+ });
1163
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1164
+
1165
+ // Add block 4 for the same checkpoint 2 in a separate call but with a missing index
1166
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1167
+ checkpointNumber: CheckpointNumber(2),
1168
+ indexWithinCheckpoint: 2,
1169
+ lastArchive: block3.archive,
1170
+ });
1171
+ await expect(store.addBlocks([block4])).rejects.toThrow(BlockIndexNotSequentialError);
1172
+
1173
+ expect(await store.getLatestBlockNumber()).toBe(3);
1174
+ });
1175
+
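
A related invariant from the test above: within the current checkpoint, `indexWithinCheckpoint` must advance by exactly one across calls, so a jump from 0 to 2 is rejected. A short sketch of that check, again as a simplified editor's model rather than the store implementation:

```ts
// Editor's illustration: index continuity within the current checkpoint.
class BlockIndexNotSequentialError extends Error {}

function validateIndexContinuity(firstIndexInBatch: number, lastStoredIndex: number | undefined): void {
  // The first block of a batch must continue right after the last stored block of the
  // same checkpoint, or start at 0 when the checkpoint has no blocks yet.
  const expected = lastStoredIndex === undefined ? 0 : lastStoredIndex + 1;
  if (firstIndexInBatch !== expected) {
    throw new BlockIndexNotSequentialError(`expected indexWithinCheckpoint ${expected}, got ${firstIndexInBatch}`);
  }
}
```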
1176
+ it('throws if second batch of blocks has different checkpoint number than first batch', async () => {
1177
+ // First, establish checkpoint 1 with blocks 1-2
1178
+ const checkpoint1 = makePublishedCheckpoint(
1179
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1180
+ 10,
1181
+ );
1182
+ await store.addCheckpoints([checkpoint1]);
1183
+
1184
+ // Add block 3 for checkpoint 2, chaining archive roots
1185
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1186
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1187
+ checkpointNumber: CheckpointNumber(2),
1188
+ indexWithinCheckpoint: 0,
1189
+ lastArchive: lastBlockArchive,
1190
+ });
1191
+ await store.addBlocks([block3]);
1192
+
1193
+ // Try to add block 4 for checkpoint 3 (should fail because current checkpoint is still 2)
1194
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1195
+ checkpointNumber: CheckpointNumber(3),
1196
+ indexWithinCheckpoint: 0,
1197
+ lastArchive: block3.archive,
1198
+ });
1199
+ await expect(store.addBlocks([block4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1200
+ });
1201
+
1202
+ it('force option bypasses checkpoint number validation', async () => {
1203
+ // First, establish checkpoint 1 with blocks 1-2
1204
+ const checkpoint1 = makePublishedCheckpoint(
1205
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1206
+ 10,
1207
+ );
1208
+ await store.addCheckpoints([checkpoint1]);
1209
+
1210
+ // Add blocks with different checkpoint numbers using force option, chaining archive roots
1211
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1212
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1213
+ checkpointNumber: CheckpointNumber(2),
1214
+ indexWithinCheckpoint: 0,
1215
+ lastArchive: lastBlockArchive,
1216
+ });
1217
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1218
+ checkpointNumber: CheckpointNumber(5),
1219
+ indexWithinCheckpoint: 0,
1220
+ lastArchive: block3.archive,
1221
+ });
1222
+
1223
+ await expect(store.addBlocks([block3, block4], { force: true })).resolves.toBe(true);
1224
+ });
1225
+
1226
+ it('force option bypasses block index validation', async () => {
1227
+ // First, establish checkpoint 1 with blocks 1-2
1228
+ const checkpoint1 = makePublishedCheckpoint(
1229
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1230
+ 10,
1231
+ );
1232
+ await store.addCheckpoints([checkpoint1]);
1233
+
1234
+ // Add blocks with a non-sequential indexWithinCheckpoint using the force option, chaining archive roots
1235
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1236
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1237
+ checkpointNumber: CheckpointNumber(2),
1238
+ indexWithinCheckpoint: 0,
1239
+ lastArchive: lastBlockArchive,
1240
+ });
1241
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1242
+ checkpointNumber: CheckpointNumber(2),
1243
+ indexWithinCheckpoint: 2,
1244
+ lastArchive: block3.archive,
1245
+ });
1246
+
1247
+ await expect(store.addBlocks([block3, block4], { force: true })).resolves.toBe(true);
1248
+ });
1249
+
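
The two `force: true` tests indicate that `addBlocks` accepts an options object whose `force` flag skips the checkpoint-number and block-index checks. A hedged usage sketch, assuming only the call shape visible in these tests:

```ts
// Editor's sketch of the call shape used above; AddBlocksStore is a structural type
// derived from the calls in this test suite, not an exported interface of the package.
interface AddBlocksStore<TBlock> {
  addBlocks(blocks: TBlock[], opts?: { force?: boolean }): Promise<boolean>;
}

async function reinsertOutOfOrder<TBlock>(store: AddBlocksStore<TBlock>, blocks: TBlock[]): Promise<boolean> {
  // With force set, validation is bypassed, e.g. when deliberately seeding a store for tests.
  return store.addBlocks(blocks, { force: true });
}
```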
1250
+ it('throws if adding blocks with non-consecutive archives', async () => {
1251
+ // First, establish checkpoint 1 with blocks 1-2
1252
+ const checkpoint1 = makePublishedCheckpoint(
1253
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1254
+ 10,
1255
+ );
1256
+ await store.addCheckpoints([checkpoint1]);
1257
+
1258
+ // Add block 3 for checkpoint 2 with incorrect archive
1259
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1260
+ checkpointNumber: CheckpointNumber(2),
1261
+ indexWithinCheckpoint: 0,
1262
+ });
1263
+ await expect(store.addBlocks([block3])).rejects.toThrow(BlockArchiveNotConsistentError);
58
1264
 
59
- const makeBlockHash = (blockNumber: number) => `0x${blockNumber.toString(16).padStart(64, '0')}`;
1265
+ expect(await store.getLatestBlockNumber()).toBe(2);
1266
+ });
1267
+
1268
+ it('throws if adding blocks with non-consecutive archives across calls', async () => {
1269
+ // First, establish checkpoint 1 with blocks 1-2
1270
+ const checkpoint1 = makePublishedCheckpoint(
1271
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1272
+ 10,
1273
+ );
1274
+ await store.addCheckpoints([checkpoint1]);
1275
+
1276
+ // Add block 3 for checkpoint 2 with correct archive
1277
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1278
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1279
+ checkpointNumber: CheckpointNumber(2),
1280
+ indexWithinCheckpoint: 0,
1281
+ lastArchive: lastBlockArchive,
1282
+ });
1283
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1284
+
1285
+ // Add block 4 with incorrect archive (should fail)
1286
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1287
+ checkpointNumber: CheckpointNumber(2),
1288
+ indexWithinCheckpoint: 1,
1289
+ lastArchive: AppendOnlyTreeSnapshot.random(),
1290
+ });
1291
+ await expect(store.addBlocks([block4])).rejects.toThrow(BlockArchiveNotConsistentError);
60
1292
 
61
- const makePublished = (block: L2Block, l1BlockNumber: number): PublishedL2Block => ({
62
- block: block,
63
- l1: {
64
- blockNumber: BigInt(l1BlockNumber),
65
- blockHash: makeBlockHash(l1BlockNumber),
66
- timestamp: BigInt(l1BlockNumber * 1000),
67
- },
68
- attestations: times(3, CommitteeAttestation.random),
1293
+ expect(await store.getLatestBlockNumber()).toBe(3);
1294
+ });
69
1295
  });
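
The last two tests in this group enforce archive chaining: each new block's `lastArchive` must match the archive of the block before it, whether that block arrived in the same batch or is already in the store. A self-contained sketch of the check; the field names follow the tests, while the string comparison is a simplification of the real snapshot comparison:

```ts
// Editor's illustration of archive-root chaining across blocks.
class BlockArchiveNotConsistentError extends Error {}

interface ArchiveLinkedBlock {
  blockNumber: number;
  lastArchive: string; // simplified: the real code compares AppendOnlyTreeSnapshot values
  archive: string;
}

function validateArchiveChain(batch: ArchiveLinkedBlock[], previousArchive: string | undefined): void {
  let expected = previousArchive;
  for (const block of batch) {
    if (expected !== undefined && block.lastArchive !== expected) {
      throw new BlockArchiveNotConsistentError(`block ${block.blockNumber} does not extend the previous archive`);
    }
    expected = block.archive;
  }
}
```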
70
1296
 
71
- const expectBlocksEqual = (actual: PublishedL2Block[], expected: PublishedL2Block[]) => {
72
- expect(actual.length).toEqual(expected.length);
73
- for (let i = 0; i < expected.length; i++) {
74
- const expectedBlock = expected[i];
75
- const actualBlock = actual[i];
76
- expect(actualBlock.l1).toEqual(expectedBlock.l1);
77
- expect(actualBlock.block.equals(expectedBlock.block)).toBe(true);
78
- expect(actualBlock.attestations.every((a, i) => a.equals(expectedBlock.attestations[i]))).toBe(true);
79
- }
80
- };
1297
+ describe('getBlocksForCheckpoint', () => {
1298
+ it('returns blocks for a single-block checkpoint', async () => {
1299
+ const checkpoint = makePublishedCheckpoint(
1300
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 }),
1301
+ 10,
1302
+ );
1303
+ await store.addCheckpoints([checkpoint]);
81
1304
 
82
- beforeEach(async () => {
83
- store = await getStore();
84
- blocks = await timesParallel(10, async i => makePublished(await L2Block.random(i + 1), i + 10));
1305
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1306
+ expect(blocks).toBeDefined();
1307
+ expect(blocks!.length).toBe(1);
1308
+ expect(blocks![0].number).toBe(1);
1309
+ });
1310
+
1311
+ it('returns all blocks for a multi-block checkpoint', async () => {
1312
+ const checkpoint = makePublishedCheckpoint(
1313
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 4, startBlockNumber: 1 }),
1314
+ 10,
1315
+ );
1316
+ await store.addCheckpoints([checkpoint]);
1317
+
1318
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1319
+ expect(blocks).toBeDefined();
1320
+ expect(blocks!.length).toBe(4);
1321
+ expect(blocks!.map(b => b.number)).toEqual([1, 2, 3, 4]);
1322
+ });
1323
+
1324
+ it('returns correct blocks for different checkpoints', async () => {
1325
+ // Create checkpoints with chained archive roots
1326
+ // Checkpoint 1: blocks 1-2
1327
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1328
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1329
+
1330
+ // Checkpoint 2: blocks 3-5
1331
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1332
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1333
+ numBlocks: 3,
1334
+ startBlockNumber: 3,
1335
+ previousArchive: previousArchive1,
1336
+ });
1337
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1338
+
1339
+ // Checkpoint 3: blocks 6-7
1340
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1341
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1342
+ numBlocks: 2,
1343
+ startBlockNumber: 6,
1344
+ previousArchive: previousArchive2,
1345
+ });
1346
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1347
+
1348
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1349
+
1350
+ const blocks1 = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1351
+ expect(blocks1).toBeDefined();
1352
+ expect(blocks1!.map(b => b.number)).toEqual([1, 2]);
1353
+
1354
+ const blocks2 = await store.getBlocksForCheckpoint(CheckpointNumber(2));
1355
+ expect(blocks2).toBeDefined();
1356
+ expect(blocks2!.map(b => b.number)).toEqual([3, 4, 5]);
1357
+
1358
+ const blocks3 = await store.getBlocksForCheckpoint(CheckpointNumber(3));
1359
+ expect(blocks3).toBeDefined();
1360
+ expect(blocks3!.map(b => b.number)).toEqual([6, 7]);
1361
+ });
1362
+
1363
+ it('returns undefined for non-existent checkpoint', async () => {
1364
+ const checkpoint = makePublishedCheckpoint(
1365
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1366
+ 10,
1367
+ );
1368
+ await store.addCheckpoints([checkpoint]);
1369
+
1370
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(5));
1371
+ expect(blocks).toBeUndefined();
1372
+ });
1373
+
1374
+ it('returns undefined when no checkpoints exist', async () => {
1375
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1376
+ expect(blocks).toBeUndefined();
1377
+ });
85
1378
  });
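
Taken together, these tests describe `getBlocksForCheckpoint` as: return the blocks of an existing checkpoint in block-number order, or `undefined` when the checkpoint is unknown (including an empty store). A hedged caller-side sketch using only the method shape seen here, with the branded `CheckpointNumber` reduced to a plain number:

```ts
// Editor's sketch; the store interface is inferred from this test suite, not imported.
interface CheckpointBlockReader<TBlock extends { number: number }> {
  getBlocksForCheckpoint(checkpointNumber: number): Promise<TBlock[] | undefined>;
}

async function blockNumbersInCheckpoint<TBlock extends { number: number }>(
  store: CheckpointBlockReader<TBlock>,
  checkpointNumber: number,
): Promise<number[]> {
  const blocks = await store.getBlocksForCheckpoint(checkpointNumber);
  // Unknown checkpoints come back as undefined rather than an empty array.
  return blocks?.map(b => b.number) ?? [];
}
```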
86
1379
 
87
- describe('addBlocks', () => {
88
- it('returns success when adding blocks', async () => {
89
- await expect(store.addBlocks(blocks)).resolves.toBe(true);
1380
+ describe('getRangeOfCheckpoints', () => {
1381
+ it('returns empty array when no checkpoints exist', async () => {
1382
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1383
+ expect(checkpoints).toEqual([]);
90
1384
  });
91
1385
 
92
- it('allows duplicate blocks', async () => {
93
- await store.addBlocks(blocks);
94
- await expect(store.addBlocks(blocks)).resolves.toBe(true);
1386
+ it('returns single checkpoint', async () => {
1387
+ const checkpoint = makePublishedCheckpoint(
1388
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1389
+ 10,
1390
+ );
1391
+ await store.addCheckpoints([checkpoint]);
1392
+
1393
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1394
+ expect(checkpoints.length).toBe(1);
1395
+ expect(checkpoints[0].checkpointNumber).toBe(1);
1396
+ expect(checkpoints[0].startBlock).toBe(1);
1397
+ expect(checkpoints[0].numBlocks).toBe(2);
95
1398
  });
96
1399
 
97
- it('throws an error if the previous block does not exist in the store', async () => {
98
- const block = makePublished(await L2Block.random(2), 2);
99
- await expect(store.addBlocks([block])).rejects.toThrow(InitialBlockNumberNotSequentialError);
100
- await expect(store.getPublishedBlocks(1, 10)).resolves.toEqual([]);
1400
+ it('returns multiple checkpoints in order', async () => {
1401
+ // Create checkpoints with chained archive roots
1402
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1403
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1404
+
1405
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1406
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1407
+ numBlocks: 3,
1408
+ startBlockNumber: 3,
1409
+ previousArchive: previousArchive1,
1410
+ });
1411
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1412
+
1413
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1414
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1415
+ numBlocks: 1,
1416
+ startBlockNumber: 6,
1417
+ previousArchive: previousArchive2,
1418
+ });
1419
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1420
+
1421
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1422
+
1423
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1424
+ expect(checkpoints.length).toBe(3);
1425
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2, 3]);
1426
+ expect(checkpoints.map(c => c.startBlock)).toEqual([1, 3, 6]);
1427
+ expect(checkpoints.map(c => c.numBlocks)).toEqual([2, 3, 1]);
101
1428
  });
102
1429
 
103
- it('throws an error if there is a gap in the blocks being added', async () => {
104
- const blocks = [makePublished(await L2Block.random(1), 1), makePublished(await L2Block.random(3), 3)];
105
- await expect(store.addBlocks(blocks)).rejects.toThrow(BlockNumberNotSequentialError);
106
- await expect(store.getPublishedBlocks(1, 10)).resolves.toEqual([]);
1430
+ it('respects the from parameter', async () => {
1431
+ // Create checkpoints with chained archive roots
1432
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1433
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1434
+
1435
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1436
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1437
+ numBlocks: 2,
1438
+ startBlockNumber: 3,
1439
+ previousArchive: previousArchive1,
1440
+ });
1441
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1442
+
1443
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1444
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1445
+ numBlocks: 2,
1446
+ startBlockNumber: 5,
1447
+ previousArchive: previousArchive2,
1448
+ });
1449
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1450
+
1451
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1452
+
1453
+ // Start from checkpoint 2
1454
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(2), 10);
1455
+ expect(checkpoints.length).toBe(2);
1456
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([2, 3]);
1457
+ });
1458
+
1459
+ it('respects the limit parameter', async () => {
1460
+ // Create checkpoints with chained archive roots
1461
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 });
1462
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1463
+
1464
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1465
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1466
+ numBlocks: 1,
1467
+ startBlockNumber: 2,
1468
+ previousArchive: previousArchive1,
1469
+ });
1470
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1471
+
1472
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1473
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1474
+ numBlocks: 1,
1475
+ startBlockNumber: 3,
1476
+ previousArchive: previousArchive2,
1477
+ });
1478
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1479
+
1480
+ const previousArchive3 = checkpoint3Cp.blocks.at(-1)!.archive;
1481
+ const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
1482
+ numBlocks: 1,
1483
+ startBlockNumber: 4,
1484
+ previousArchive: previousArchive3,
1485
+ });
1486
+ const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
1487
+
1488
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3, checkpoint4]);
1489
+
1490
+ // Only get 2 checkpoints
1491
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 2);
1492
+ expect(checkpoints.length).toBe(2);
1493
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2]);
1494
+ });
1495
+
1496
+ it('returns correct checkpoint data including L1 info', async () => {
1497
+ const checkpoint = makePublishedCheckpoint(
1498
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
1499
+ 42,
1500
+ );
1501
+ await store.addCheckpoints([checkpoint]);
1502
+
1503
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 1);
1504
+ expect(checkpoints.length).toBe(1);
1505
+
1506
+ const data = checkpoints[0];
1507
+ expect(data.checkpointNumber).toBe(1);
1508
+ expect(data.startBlock).toBe(1);
1509
+ expect(data.numBlocks).toBe(3);
1510
+ expect(data.l1.blockNumber).toBe(42n);
1511
+ expect(data.header.equals(checkpoint.checkpoint.header)).toBe(true);
1512
+ expect(data.archive.equals(checkpoint.checkpoint.archive)).toBe(true);
1513
+ });
1514
+
1515
+ it('returns empty array when from is beyond available checkpoints', async () => {
1516
+ const checkpoint = makePublishedCheckpoint(
1517
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1518
+ 10,
1519
+ );
1520
+ await store.addCheckpoints([checkpoint]);
1521
+
1522
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(5), 10);
1523
+ expect(checkpoints).toEqual([]);
1524
+ });
1525
+
1526
+ it('works correctly after unwinding checkpoints', async () => {
1527
+ // Create checkpoints with chained archive roots
1528
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1529
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1530
+
1531
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1532
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1533
+ numBlocks: 2,
1534
+ startBlockNumber: 3,
1535
+ previousArchive: previousArchive1,
1536
+ });
1537
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1538
+
1539
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1540
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1541
+ numBlocks: 2,
1542
+ startBlockNumber: 5,
1543
+ previousArchive: previousArchive2,
1544
+ });
1545
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1546
+
1547
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1548
+
1549
+ // Unwind checkpoint 3
1550
+ await store.unwindCheckpoints(CheckpointNumber(3), 1);
1551
+
1552
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1553
+ expect(checkpoints.length).toBe(2);
1554
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2]);
107
1555
  });
108
1556
  });
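
These tests show `getRangeOfCheckpoints(from, limit)` returning summaries in ascending checkpoint order with `checkpointNumber`, `startBlock`, `numBlocks`, L1 publication info, header, and archive; an empty array when nothing matches; and a range that reflects later unwinds. A hedged sketch of a caller paging through checkpoints; the summary shape lists only the fields asserted above, and the batch size is an arbitrary example value:

```ts
// Editor's sketch of paging over checkpoint summaries; the summary shape is a partial,
// assumed view of the real return type, taken from the expectations in this suite.
interface CheckpointSummary {
  checkpointNumber: number;
  startBlock: number;
  numBlocks: number;
}

interface CheckpointRangeReader {
  getRangeOfCheckpoints(from: number, limit: number): Promise<CheckpointSummary[]>;
}

async function collectAllCheckpoints(store: CheckpointRangeReader, batchSize = 50): Promise<CheckpointSummary[]> {
  const all: CheckpointSummary[] = [];
  let from = 1; // INITIAL_CHECKPOINT_NUMBER is assumed to be 1 in this sketch
  while (true) {
    const page = await store.getRangeOfCheckpoints(from, batchSize);
    all.push(...page);
    if (page.length < batchSize) {
      return all;
    }
    from = page[page.length - 1].checkpointNumber + 1;
  }
}
```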
109
1557
 
110
- describe('unwindBlocks', () => {
111
- it('unwinding blocks will remove blocks from the chain', async () => {
112
- await store.addBlocks(blocks);
113
- const blockNumber = await store.getSynchedL2BlockNumber();
1558
+ describe('getCheckpointedBlock', () => {
1559
+ beforeEach(async () => {
1560
+ await store.addCheckpoints(publishedCheckpoints);
1561
+ });
1562
+
1563
+ it.each(blockNumberTests)('retrieves previously stored block %i', async (blockNumber, getExpectedBlock) => {
1564
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
1565
+ const expectedBlock = getExpectedBlock();
1566
+ const expectedCheckpoint = publishedCheckpoints[blockNumber - 1];
114
1567
 
115
- expectBlocksEqual(await store.getPublishedBlocks(blockNumber, 1), [blocks[blocks.length - 1]]);
1568
+ expect(retrievedBlock).toBeDefined();
1569
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
1570
+ });
116
1571
 
117
- await store.unwindBlocks(blockNumber, 1);
1572
+ it('returns undefined if block is not found', async () => {
1573
+ await expect(store.getCheckpointedBlock(12)).resolves.toBeUndefined();
1574
+ });
118
1575
 
119
- expect(await store.getSynchedL2BlockNumber()).toBe(blockNumber - 1);
120
- expect(await store.getPublishedBlocks(blockNumber, 1)).toEqual([]);
1576
+ it('returns undefined for block number 0', async () => {
1577
+ await expect(store.getCheckpointedBlock(0)).resolves.toBeUndefined();
121
1578
  });
1579
+ });
122
1580
 
123
- it('can unwind multiple empty blocks', async () => {
124
- const emptyBlocks = await timesParallel(10, async i => makePublished(await L2Block.random(i + 1, 0), i + 10));
125
- await store.addBlocks(emptyBlocks);
126
- expect(await store.getSynchedL2BlockNumber()).toBe(10);
1581
+ describe('getCheckpointedBlockByHash', () => {
1582
+ beforeEach(async () => {
1583
+ await store.addCheckpoints(publishedCheckpoints);
1584
+ });
127
1585
 
128
- await store.unwindBlocks(10, 3);
129
- expect(await store.getSynchedL2BlockNumber()).toBe(7);
130
- expect((await store.getPublishedBlocks(1, 10)).map(b => b.block.number)).toEqual([1, 2, 3, 4, 5, 6, 7]);
1586
+ it('retrieves a block by its hash', async () => {
1587
+ const expectedCheckpoint = publishedCheckpoints[5];
1588
+ const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
1589
+ const blockHash = await expectedBlock.header.hash();
1590
+ const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
1591
+
1592
+ expect(retrievedBlock).toBeDefined();
1593
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
131
1594
  });
132
1595
 
133
- it('refuses to unwind blocks if the tip is not the last block', async () => {
134
- await store.addBlocks(blocks);
135
- await expect(store.unwindBlocks(5, 1)).rejects.toThrow(/can only unwind blocks from the tip/i);
1596
+ it('returns undefined for non-existent block hash', async () => {
1597
+ const nonExistentHash = Fr.random();
1598
+ await expect(store.getCheckpointedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
136
1599
  });
137
1600
  });
138
1601
 
139
- describe('getBlocks', () => {
1602
+ describe('getCheckpointedBlockByArchive', () => {
140
1603
  beforeEach(async () => {
141
- await store.addBlocks(blocks);
1604
+ await store.addCheckpoints(publishedCheckpoints);
1605
+ });
1606
+
1607
+ it('retrieves a block by its archive root', async () => {
1608
+ const expectedCheckpoint = publishedCheckpoints[3];
1609
+ const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
1610
+ const archive = expectedBlock.archive.root;
1611
+ const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
1612
+
1613
+ expect(retrievedBlock).toBeDefined();
1614
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
142
1615
  });
143
1616
 
144
- it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
145
- expectBlocksEqual(await store.getPublishedBlocks(start, limit), getExpectedBlocks());
1617
+ it('returns undefined for non-existent archive root', async () => {
1618
+ const nonExistentArchive = Fr.random();
1619
+ await expect(store.getCheckpointedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined();
146
1620
  });
1621
+ });
147
1622
 
148
- it('returns an empty array if no blocks are found', async () => {
149
- await expect(store.getPublishedBlocks(12, 1)).resolves.toEqual([]);
1623
+ describe('getBlockHeaderByHash', () => {
1624
+ beforeEach(async () => {
1625
+ await store.addCheckpoints(publishedCheckpoints);
150
1626
  });
151
1627
 
152
- it('throws an error if limit is invalid', async () => {
153
- await expect(store.getPublishedBlocks(1, 0)).rejects.toThrow('Invalid limit: 0');
1628
+ it('retrieves a block header by its hash', async () => {
1629
+ const expectedBlock = publishedCheckpoints[7].checkpoint.blocks[0];
1630
+ const blockHash = await expectedBlock.header.hash();
1631
+ const retrievedHeader = await store.getBlockHeaderByHash(blockHash);
1632
+
1633
+ expect(retrievedHeader).toBeDefined();
1634
+ expect(retrievedHeader!.equals(expectedBlock.header)).toBe(true);
154
1635
  });
155
1636
 
156
- it('throws an error if `from` it is out of range', async () => {
157
- await expect(store.getPublishedBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).rejects.toThrow('Invalid start: -99');
1637
+ it('returns undefined for non-existent block hash', async () => {
1638
+ const nonExistentHash = Fr.random();
1639
+ await expect(store.getBlockHeaderByHash(nonExistentHash)).resolves.toBeUndefined();
158
1640
  });
1641
+ });
159
1642
 
160
- it('throws an error if unexpected initial block number is found', async () => {
161
- await store.addBlocks([makePublished(await L2Block.random(21), 31)], { force: true });
162
- await expect(store.getPublishedBlocks(20, 1)).rejects.toThrow(`mismatch`);
1643
+ describe('getBlockHeaderByArchive', () => {
1644
+ beforeEach(async () => {
1645
+ await store.addCheckpoints(publishedCheckpoints);
163
1646
  });
164
1647
 
165
- it('throws an error if a gap is found', async () => {
166
- await store.addBlocks(
167
- [makePublished(await L2Block.random(20), 30), makePublished(await L2Block.random(22), 32)],
168
- { force: true },
169
- );
170
- await expect(store.getPublishedBlocks(20, 2)).rejects.toThrow(`mismatch`);
1648
+ it('retrieves a block header by its archive root', async () => {
1649
+ const expectedBlock = publishedCheckpoints[2].checkpoint.blocks[0];
1650
+ const archive = expectedBlock.archive.root;
1651
+ const retrievedHeader = await store.getBlockHeaderByArchive(archive);
1652
+
1653
+ expect(retrievedHeader).toBeDefined();
1654
+ expect(retrievedHeader!.equals(expectedBlock.header)).toBe(true);
1655
+ });
1656
+
1657
+ it('returns undefined for non-existent archive root', async () => {
1658
+ const nonExistentArchive = Fr.random();
1659
+ await expect(store.getBlockHeaderByArchive(nonExistentArchive)).resolves.toBeUndefined();
171
1660
  });
172
1661
  });
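
The four lookup suites above share one pattern: blocks and headers can be fetched either by block hash or by archive root, and a miss resolves to `undefined` instead of throwing. A hedged sketch of a resolver built on those methods; the structural interface is the editor's, and the `Fr`-like keys are reduced to strings for illustration:

```ts
// Editor's sketch combining the lookup methods exercised above; structural types only.
interface BlockLookupStore<THeader, TBlock> {
  getBlockHeaderByHash(hash: string): Promise<THeader | undefined>;
  getBlockHeaderByArchive(archive: string): Promise<THeader | undefined>;
  getCheckpointedBlockByHash(hash: string): Promise<TBlock | undefined>;
  getCheckpointedBlockByArchive(archive: string): Promise<TBlock | undefined>;
}

async function findHeader<THeader, TBlock>(
  store: BlockLookupStore<THeader, TBlock>,
  key: { hash?: string; archive?: string },
): Promise<THeader | undefined> {
  // Prefer the hash index when both keys are supplied; fall back to the archive index.
  if (key.hash) {
    return store.getBlockHeaderByHash(key.hash);
  }
  return key.archive ? store.getBlockHeaderByArchive(key.archive) : undefined;
}
```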
173
1662
 
174
- describe('getSyncedL2BlockNumber', () => {
175
- it('returns the block number before INITIAL_L2_BLOCK_NUM if no blocks have been added', async () => {
176
- await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(INITIAL_L2_BLOCK_NUM - 1);
1663
+ describe('getSynchedCheckpointNumber', () => {
1664
+ it('returns the checkpoint number before INITIAL_CHECKPOINT_NUMBER if no checkpoints have been added', async () => {
1665
+ await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(INITIAL_CHECKPOINT_NUMBER - 1);
177
1666
  });
178
1667
 
179
- it("returns the most recently added block's number", async () => {
180
- await store.addBlocks(blocks);
181
- await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.block.number);
1668
+ it('returns the most recently added checkpoint number', async () => {
1669
+ await store.addCheckpoints(publishedCheckpoints);
1670
+ await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(
1671
+ publishedCheckpoints.at(-1)!.checkpoint.number,
1672
+ );
182
1673
  });
183
1674
  });
184
1675
 
@@ -191,7 +1682,7 @@ export function describeArchiverDataStore(
191
1682
  });
192
1683
 
193
1684
  it('returns the L1 block number in which the most recent L2 block was published', async () => {
194
- await store.addBlocks(blocks);
1685
+ await store.addCheckpoints(publishedCheckpoints);
195
1686
  await expect(store.getSynchPoint()).resolves.toEqual({
196
1687
  blocksSynchedTo: 19n,
197
1688
  messagesSynchedTo: undefined,
@@ -224,61 +1715,47 @@ export function describeArchiverDataStore(
224
1715
 
225
1716
  describe('addLogs', () => {
226
1717
  it('adds private & public logs', async () => {
227
- const block = blocks[0].block;
228
- await expect(store.addLogs([block])).resolves.toEqual(true);
1718
+ const checkpoint = publishedCheckpoints[0];
1719
+ await store.addCheckpoints([checkpoint]);
1720
+ await expect(store.addLogs(checkpoint.checkpoint.blocks)).resolves.toEqual(true);
229
1721
  });
230
1722
  });
231
1723
 
232
- describe('deleteLogs', () => {
233
- it('deletes private & public logs', async () => {
234
- const block = blocks[0].block;
235
- await store.addBlocks([blocks[0]]);
236
- await expect(store.addLogs([block])).resolves.toEqual(true);
237
-
238
- expect((await store.getPrivateLogs(1, 1)).length).toEqual(
239
- block.body.txEffects.map(txEffect => txEffect.privateLogs).flat().length,
240
- );
241
- expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(
242
- block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length,
243
- );
244
-
245
- // This one is a pain for memory as we would never want to just delete memory in the middle.
246
- await store.deleteLogs([block]);
1724
+ it('deleteLogs', async () => {
1725
+ const block = publishedCheckpoints[0].checkpoint.blocks[0];
1726
+ await store.addBlocks([block]);
1727
+ await expect(store.addLogs([block])).resolves.toEqual(true);
247
1728
 
248
- expect((await store.getPrivateLogs(1, 1)).length).toEqual(0);
249
- expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(0);
250
- });
251
- });
1729
+ expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(
1730
+ block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length,
1731
+ );
252
1732
 
253
- describe('getPrivateLogs', () => {
254
- it('gets added private logs', async () => {
255
- const block = blocks[0].block;
256
- await store.addBlocks([blocks[0]]);
257
- await store.addLogs([block]);
1733
+ // This one is a pain for memory as we would never want to just delete memory in the middle.
1734
+ await store.deleteLogs([block]);
258
1735
 
259
- const privateLogs = await store.getPrivateLogs(1, 1);
260
- expect(privateLogs).toEqual(block.body.txEffects.map(txEffect => txEffect.privateLogs).flat());
261
- });
1736
+ expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(0);
262
1737
  });
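
The reworked log tests still follow an add-then-delete round trip: logs are stored per block with `addLogs`, queried with `getPublicLogs({ fromBlock })`, and removed with `deleteLogs` when blocks are pruned. A hedged sketch of that round trip, with structural types inferred from the calls above rather than imported from the package:

```ts
// Editor's sketch of the add/query/delete log round trip shown in this suite.
interface LogStoreLike<TBlock> {
  addLogs(blocks: TBlock[]): Promise<boolean>;
  deleteLogs(blocks: TBlock[]): Promise<void>;
  getPublicLogs(filter: { fromBlock: number }): Promise<{ logs: unknown[] }>;
}

async function countLogsAfterReorg<TBlock>(store: LogStoreLike<TBlock>, blocks: TBlock[]): Promise<number> {
  await store.addLogs(blocks);
  // On a reorg the same blocks are handed back so their logs can be removed.
  await store.deleteLogs(blocks);
  return (await store.getPublicLogs({ fromBlock: 1 })).logs.length; // expected to be 0
}
```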
263
1738
 
264
1739
  describe('getTxEffect', () => {
1740
+ const getBlock = (i: number) => publishedCheckpoints[i].checkpoint.blocks[0];
1741
+
265
1742
  beforeEach(async () => {
266
- await store.addLogs(blocks.map(b => b.block));
267
- await store.addBlocks(blocks);
1743
+ await store.addLogs(publishedCheckpoints.flatMap(x => x.checkpoint.blocks));
1744
+ await store.addCheckpoints(publishedCheckpoints);
268
1745
  });
269
1746
 
270
1747
  it.each([
271
- () => ({ data: blocks[0].block.body.txEffects[0], block: blocks[0].block, txIndexInBlock: 0 }),
272
- () => ({ data: blocks[9].block.body.txEffects[3], block: blocks[9].block, txIndexInBlock: 3 }),
273
- () => ({ data: blocks[3].block.body.txEffects[1], block: blocks[3].block, txIndexInBlock: 1 }),
274
- () => ({ data: blocks[5].block.body.txEffects[2], block: blocks[5].block, txIndexInBlock: 2 }),
275
- () => ({ data: blocks[1].block.body.txEffects[0], block: blocks[1].block, txIndexInBlock: 0 }),
1748
+ () => ({ data: getBlock(0).body.txEffects[0], block: getBlock(0), txIndexInBlock: 0 }),
1749
+ () => ({ data: getBlock(9).body.txEffects[3], block: getBlock(9), txIndexInBlock: 3 }),
1750
+ () => ({ data: getBlock(3).body.txEffects[1], block: getBlock(3), txIndexInBlock: 1 }),
1751
+ () => ({ data: getBlock(5).body.txEffects[2], block: getBlock(5), txIndexInBlock: 2 }),
1752
+ () => ({ data: getBlock(1).body.txEffects[0], block: getBlock(1), txIndexInBlock: 0 }),
276
1753
  ])('retrieves a previously stored transaction', async getExpectedTx => {
277
1754
  const { data, block, txIndexInBlock } = getExpectedTx();
278
1755
  const expectedTx: IndexedTxEffect = {
279
1756
  data,
280
1757
  l2BlockNumber: block.number,
281
- l2BlockHash: L2BlockHash.fromField(await block.hash()),
1758
+ l2BlockHash: L2BlockHash.fromField(await block.header.hash()),
282
1759
  txIndexInBlock,
283
1760
  };
284
1761
  const actualTx = await store.getTxEffect(data.txHash);
@@ -290,16 +1767,16 @@ export function describeArchiverDataStore(
290
1767
  });
291
1768
 
292
1769
  it.each([
293
- () => wrapInBlock(blocks[0].block.body.txEffects[0], blocks[0].block),
294
- () => wrapInBlock(blocks[9].block.body.txEffects[3], blocks[9].block),
295
- () => wrapInBlock(blocks[3].block.body.txEffects[1], blocks[3].block),
296
- () => wrapInBlock(blocks[5].block.body.txEffects[2], blocks[5].block),
297
- () => wrapInBlock(blocks[1].block.body.txEffects[0], blocks[1].block),
298
- ])('tries to retrieves a previously stored transaction after deleted', async getExpectedTx => {
299
- await store.unwindBlocks(blocks.length, blocks.length);
300
-
301
- const expectedTx = await getExpectedTx();
302
- const actualTx = await store.getTxEffect(expectedTx.data.txHash);
1770
+ () => getBlock(0).body.txEffects[0],
1771
+ () => getBlock(9).body.txEffects[3],
1772
+ () => getBlock(3).body.txEffects[1],
1773
+ () => getBlock(5).body.txEffects[2],
1774
+ () => getBlock(1).body.txEffects[0],
1775
+ ])('tries to retrieves a previously stored transaction after deleted', async getTxEffect => {
1776
+ await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
1777
+
1778
+ const txEffect = getTxEffect();
1779
+ const actualTx = await store.getTxEffect(txEffect.txHash);
303
1780
  expect(actualTx).toEqual(undefined);
304
1781
  });
305
1782
 
@@ -308,22 +1785,22 @@ export function describeArchiverDataStore(
308
1785
  });
309
1786
 
310
1787
  it('does not fail if the block is unwound while requesting a tx', async () => {
311
- const expectedTx = await wrapInBlock(blocks[1].block.body.txEffects[0], blocks[1].block);
1788
+ const txEffect = getBlock(1).body.txEffects[0];
312
1789
  let done = false;
313
1790
  void (async () => {
314
1791
  while (!done) {
315
- void store.getTxEffect(expectedTx.data.txHash);
1792
+ void store.getTxEffect(txEffect.txHash);
316
1793
  await sleep(1);
317
1794
  }
318
1795
  })();
319
- await store.unwindBlocks(blocks.length, blocks.length);
1796
+ await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
320
1797
  done = true;
321
- expect(await store.getTxEffect(expectedTx.data.txHash)).toEqual(undefined);
1798
+ expect(await store.getTxEffect(txEffect.txHash)).toEqual(undefined);
322
1799
  });
323
1800
  });
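
These tests characterize `getTxEffect(txHash)` as returning an indexed effect (`data`, `l2BlockNumber`, `l2BlockHash`, `txIndexInBlock`) while the containing block is stored, and `undefined` once the checkpoints are unwound, even under concurrent reads. A hedged sketch of a caller that tolerates both outcomes:

```ts
// Editor's sketch; IndexedTxEffectLike mirrors only the fields asserted above.
interface IndexedTxEffectLike<TEffect> {
  data: TEffect;
  l2BlockNumber: number;
  l2BlockHash: string;
  txIndexInBlock: number;
}

interface TxEffectReader<TEffect> {
  getTxEffect(txHash: string): Promise<IndexedTxEffectLike<TEffect> | undefined>;
}

async function locateTx<TEffect>(
  store: TxEffectReader<TEffect>,
  txHash: string,
): Promise<{ blockNumber: number; index: number } | undefined> {
  const effect = await store.getTxEffect(txHash);
  // A miss (e.g. after an unwind) is a normal outcome, not an error.
  return effect ? { blockNumber: effect.l2BlockNumber, index: effect.txIndexInBlock } : undefined;
}
```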
324
1801
 
325
1802
  describe('L1 to L2 Messages', () => {
326
- const initialL2BlockNumber = 13;
1803
+ const initialCheckpointNumber = CheckpointNumber(13);
327
1804
 
328
1805
  const checkMessages = async (msgs: InboxMessage[]) => {
329
1806
  expect(await store.getLastL1ToL2Message()).toEqual(msgs.at(-1));
@@ -331,43 +1808,50 @@ export function describeArchiverDataStore(
331
1808
  expect(await store.getTotalL1ToL2MessageCount()).toEqual(BigInt(msgs.length));
332
1809
  };
333
1810
 
334
- const makeInboxMessagesWithFullBlocks = (blockCount: number, opts: { initialL2BlockNumber?: number } = {}) =>
1811
+ const makeInboxMessagesWithFullBlocks = (
1812
+ blockCount: number,
1813
+ opts: { initialCheckpointNumber?: CheckpointNumber } = {},
1814
+ ) =>
335
1815
  makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, {
336
1816
  overrideFn: (msg, i) => {
337
- const l2BlockNumber =
338
- (opts.initialL2BlockNumber ?? initialL2BlockNumber) + Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1817
+ const checkpointNumber = CheckpointNumber(
1818
+ (opts.initialCheckpointNumber ?? initialCheckpointNumber) +
1819
+ Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
1820
+ );
339
1821
  const index =
340
- InboxLeaf.smallestIndexFromL2Block(l2BlockNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
341
- return { ...msg, l2BlockNumber, index };
1822
+ InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1823
+ return { ...msg, checkpointNumber, index };
342
1824
  },
343
1825
  });
344
1826
 
345
1827
  it('stores first message ever', async () => {
346
- const msg = makeInboxMessage(Buffer16.ZERO, { index: 0n, l2BlockNumber: 1 });
1828
+ const msg = makeInboxMessage(Buffer16.ZERO, { index: 0n, checkpointNumber: CheckpointNumber(1) });
347
1829
  await store.addL1ToL2Messages([msg]);
348
1830
 
349
1831
  await checkMessages([msg]);
350
- expect(await store.getL1ToL2Messages(1)).toEqual([msg.leaf]);
1832
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([msg.leaf]);
351
1833
  });
352
1834
 
353
1835
  it('stores single message', async () => {
354
- const msg = makeInboxMessage(Buffer16.ZERO, { l2BlockNumber: 2 });
1836
+ const msg = makeInboxMessage(Buffer16.ZERO, { checkpointNumber: CheckpointNumber(2) });
355
1837
  await store.addL1ToL2Messages([msg]);
356
1838
 
357
1839
  await checkMessages([msg]);
358
- expect(await store.getL1ToL2Messages(2)).toEqual([msg.leaf]);
1840
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toEqual([msg.leaf]);
359
1841
  });
360
1842
 
361
1843
  it('stores and returns messages across different blocks', async () => {
362
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1844
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
363
1845
  await store.addL1ToL2Messages(msgs);
364
1846
 
365
1847
  await checkMessages(msgs);
366
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 2)).toEqual([msgs[2]].map(m => m.leaf));
1848
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual(
1849
+ [msgs[2]].map(m => m.leaf),
1850
+ );
367
1851
  });
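
The message tests have moved from L2 block numbers to checkpoint numbers: an `InboxMessage` now carries a `checkpointNumber` and a global `index`, where the first index of a checkpoint comes from `InboxLeaf.smallestIndexForCheckpoint`. A self-contained sketch of how indexes could be laid out per checkpoint; the constant value and the layout formula are assumptions for this illustration, not the package's implementation:

```ts
// Editor's illustration of per-checkpoint message indexing; the constant name mirrors
// the test suite, but the value 16 is an assumption for this sketch.
const NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP = 16;

// Assumed stand-in for InboxLeaf.smallestIndexForCheckpoint in this simplified model:
// checkpoints are laid out back to back, each reserving a fixed slice of leaf indexes.
function smallestIndexForCheckpoint(checkpointNumber: number): bigint {
  return BigInt((checkpointNumber - 1) * NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
}

function indexOfMessage(checkpointNumber: number, positionInCheckpoint: number): bigint {
  return smallestIndexForCheckpoint(checkpointNumber) + BigInt(positionInCheckpoint);
}
```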
368
1852
 
369
1853
  it('stores the same messages again', async () => {
370
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1854
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
371
1855
  await store.addL1ToL2Messages(msgs);
372
1856
  await store.addL1ToL2Messages(msgs.slice(2));
373
1857
 
@@ -375,26 +1859,29 @@ export function describeArchiverDataStore(
375
1859
  });
376
1860
 
377
1861
  it('stores and returns messages across different blocks with gaps', async () => {
378
- const msgs1 = makeInboxMessages(3, { initialL2BlockNumber: 1 });
379
- const msgs2 = makeInboxMessages(3, { initialL2BlockNumber: 20, initialHash: msgs1.at(-1)!.rollingHash });
1862
+ const msgs1 = makeInboxMessages(3, { initialCheckpointNumber: CheckpointNumber(1) });
1863
+ const msgs2 = makeInboxMessages(3, {
1864
+ initialCheckpointNumber: CheckpointNumber(20),
1865
+ initialHash: msgs1.at(-1)!.rollingHash,
1866
+ });
380
1867
 
381
1868
  await store.addL1ToL2Messages(msgs1);
382
1869
  await store.addL1ToL2Messages(msgs2);
383
1870
 
384
1871
  await checkMessages([...msgs1, ...msgs2]);
385
1872
 
386
- expect(await store.getL1ToL2Messages(1)).toEqual([msgs1[0].leaf]);
387
- expect(await store.getL1ToL2Messages(4)).toEqual([]);
388
- expect(await store.getL1ToL2Messages(20)).toEqual([msgs2[0].leaf]);
389
- expect(await store.getL1ToL2Messages(24)).toEqual([]);
1873
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([msgs1[0].leaf]);
1874
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toEqual([]);
1875
+ expect(await store.getL1ToL2Messages(CheckpointNumber(20))).toEqual([msgs2[0].leaf]);
1876
+ expect(await store.getL1ToL2Messages(CheckpointNumber(24))).toEqual([]);
390
1877
  });
391
1878
 
392
1879
  it('stores and returns messages with block numbers larger than a byte', async () => {
393
- const msgs = makeInboxMessages(5, { initialL2BlockNumber: 1000 });
1880
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber: CheckpointNumber(1000) });
394
1881
  await store.addL1ToL2Messages(msgs);
395
1882
 
396
1883
  await checkMessages(msgs);
397
- expect(await store.getL1ToL2Messages(1002)).toEqual([msgs[2]].map(m => m.leaf));
1884
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1002))).toEqual([msgs[2]].map(m => m.leaf));
398
1885
  });
399
1886
 
400
1887
  it('stores and returns multiple messages per block', async () => {
@@ -402,7 +1889,7 @@ export function describeArchiverDataStore(
402
1889
  await store.addL1ToL2Messages(msgs);
403
1890
 
404
1891
  await checkMessages(msgs);
405
- const blockMessages = await store.getL1ToL2Messages(initialL2BlockNumber + 1);
1892
+ const blockMessages = await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 1));
406
1893
  expect(blockMessages).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
407
1894
  expect(blockMessages).toEqual(
408
1895
  msgs.slice(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2).map(m => m.leaf),
@@ -410,17 +1897,21 @@ export function describeArchiverDataStore(
410
1897
  });
411
1898
 
412
1899
  it('stores messages in multiple operations', async () => {
413
- const msgs = makeInboxMessages(20, { initialL2BlockNumber });
1900
+ const msgs = makeInboxMessages(20, { initialCheckpointNumber });
414
1901
  await store.addL1ToL2Messages(msgs.slice(0, 10));
415
1902
  await store.addL1ToL2Messages(msgs.slice(10, 20));
416
1903
 
417
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 2)).toEqual([msgs[2]].map(m => m.leaf));
418
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 12)).toEqual([msgs[12]].map(m => m.leaf));
1904
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual(
1905
+ [msgs[2]].map(m => m.leaf),
1906
+ );
1907
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 12))).toEqual(
1908
+ [msgs[12]].map(m => m.leaf),
1909
+ );
419
1910
  await checkMessages(msgs);
420
1911
  });
421
1912
 
422
1913
  it('iterates over messages from start index', async () => {
423
- const msgs = makeInboxMessages(10, { initialL2BlockNumber });
1914
+ const msgs = makeInboxMessages(10, { initialCheckpointNumber });
424
1915
  await store.addL1ToL2Messages(msgs);
425
1916
 
426
1917
  const iterated = await toArray(store.iterateL1ToL2Messages({ start: msgs[3].index }));
@@ -428,8 +1919,9 @@ export function describeArchiverDataStore(
428
1919
  });
429
1920
 
430
1921
  it('iterates over messages in reverse', async () => {
431
- const msgs = makeInboxMessages(10, { initialL2BlockNumber });
1922
+ const msgs = makeInboxMessages(10, { initialCheckpointNumber });
432
1923
  await store.addL1ToL2Messages(msgs);
433
1925
 
434
1926
  const iterated = await toArray(store.iterateL1ToL2Messages({ reverse: true, end: msgs[3].index }));
435
1927
  expect(iterated).toEqual(msgs.slice(0, 4).reverse());
@@ -441,8 +1933,8 @@ export function describeArchiverDataStore(
441
1933
  });
442
1934
 
443
1935
  it('throws if block number for the first message is out of order', async () => {
444
- const msgs = makeInboxMessages(4, { initialL2BlockNumber });
445
- msgs[2].l2BlockNumber = initialL2BlockNumber - 1;
1936
+ const msgs = makeInboxMessages(4, { initialCheckpointNumber });
1937
+ msgs[2].checkpointNumber = CheckpointNumber(initialCheckpointNumber - 1);
446
1938
  await store.addL1ToL2Messages(msgs.slice(0, 2));
447
1939
  await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
448
1940
  });
@@ -456,28 +1948,28 @@ export function describeArchiverDataStore(
456
1948
  it('throws if rolling hash for first message is not correct', async () => {
457
1949
  const msgs = makeInboxMessages(4);
458
1950
  msgs[2].rollingHash = Buffer16.random();
459
- await store.addL1ToL2Messages(msgs.slice(0, 2));
1951
+ await store.addL1ToL2Messages(msgs.slice(0, CheckpointNumber(2)));
460
1952
  await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
461
1953
  });
462
1954
 
463
1955
  it('throws if index is not in the correct range', async () => {
464
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1956
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
465
1957
  msgs.at(-1)!.index += 100n;
466
1958
  await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
467
1959
  });
468
1960
 
469
1961
  it('throws if first index in block has gaps', async () => {
470
- const msgs = makeInboxMessages(4, { initialL2BlockNumber });
1962
+ const msgs = makeInboxMessages(4, { initialCheckpointNumber });
471
1963
  msgs[2].index++;
472
1964
  await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
473
1965
  });
474
1966
 
475
1967
  it('throws if index does not follow previous one', async () => {
476
1968
  const msgs = makeInboxMessages(2, {
477
- initialL2BlockNumber,
1969
+ initialCheckpointNumber,
478
1970
  overrideFn: (msg, i) => ({
479
1971
  ...msg,
480
- l2BlockNumber: 2,
1972
+ checkpointNumber: CheckpointNumber(2),
481
1973
  index: BigInt(i + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2),
482
1974
  }),
483
1975
  });
@@ -486,28 +1978,28 @@ export function describeArchiverDataStore(
486
1978
  });
487
1979
 
488
1980
  it('removes messages up to the given block number', async () => {
489
- const msgs = makeInboxMessagesWithFullBlocks(4, { initialL2BlockNumber: 1 });
1981
+ const msgs = makeInboxMessagesWithFullBlocks(4, { initialCheckpointNumber: CheckpointNumber(1) });
490
1982
 
491
1983
  await store.addL1ToL2Messages(msgs);
492
1984
  await checkMessages(msgs);
493
1985
 
494
- expect(await store.getL1ToL2Messages(1)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
495
- expect(await store.getL1ToL2Messages(2)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
496
- expect(await store.getL1ToL2Messages(3)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
497
- expect(await store.getL1ToL2Messages(4)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1986
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1987
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1988
+ expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1989
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
498
1990
 
499
- await store.rollbackL1ToL2MessagesToL2Block(2);
1991
+ await store.rollbackL1ToL2MessagesToCheckpoint(CheckpointNumber(2));
500
1992
 
501
- expect(await store.getL1ToL2Messages(1)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
502
- expect(await store.getL1ToL2Messages(2)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
503
- expect(await store.getL1ToL2Messages(3)).toHaveLength(0);
504
- expect(await store.getL1ToL2Messages(4)).toHaveLength(0);
1993
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1994
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1995
+ expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(0);
1996
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(0);
505
1997
 
506
1998
  await checkMessages(msgs.slice(0, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2));
507
1999
  });
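
This test shows `rollbackL1ToL2MessagesToCheckpoint(n)` keeping messages up to and including checkpoint `n` and dropping everything after it. A hedged sketch of the same semantics over a simplified in-memory list:

```ts
// Editor's illustration of rollback semantics; not the kv store's actual deletion logic.
interface StoredInboxMessage {
  checkpointNumber: number;
  index: bigint;
}

function rollbackToCheckpoint(messages: StoredInboxMessage[], checkpointNumber: number): StoredInboxMessage[] {
  // Messages for checkpoints <= the target survive; later ones are discarded.
  return messages.filter(m => m.checkpointNumber <= checkpointNumber);
}
```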
508
2000
 
509
2001
  it('removes messages starting with the given index', async () => {
510
- const msgs = makeInboxMessagesWithFullBlocks(4, { initialL2BlockNumber: 1 });
2002
+ const msgs = makeInboxMessagesWithFullBlocks(4, { initialCheckpointNumber: CheckpointNumber(1) });
511
2003
  await store.addL1ToL2Messages(msgs);
512
2004
 
513
2005
  await store.removeL1ToL2Messages(msgs[13].index);
@@ -527,7 +2019,7 @@ export function describeArchiverDataStore(
527
2019
  originalContractClassId: classId,
528
2020
  });
529
2021
  contractInstance = { ...randomInstance, address: await AztecAddress.random() };
530
- await store.addContractInstances([contractInstance], blockNum);
2022
+ await store.addContractInstances([contractInstance], BlockNumber(blockNum));
531
2023
  });
532
2024
 
533
2025
  it('returns previously stored contract instances', async () => {
@@ -541,7 +2033,7 @@ export function describeArchiverDataStore(
541
2033
  });
542
2034
 
543
2035
  it('returns undefined if previously stored contract instances was deleted', async () => {
544
- await store.deleteContractInstances([contractInstance], blockNum);
2036
+ await store.deleteContractInstances([contractInstance], BlockNumber(blockNum));
545
2037
  await expect(store.getContractInstance(contractInstance.address, timestamp)).resolves.toBeUndefined();
546
2038
  });
547
2039
  });
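
A recurring change in this file is that plain numbers passed to contract-class and contract-instance methods are now wrapped as `BlockNumber(...)`, and checkpoint arguments as `CheckpointNumber(...)`, which suggests branded numeric types. A hedged sketch of how such a brand is commonly declared in TypeScript; the actual definitions in the @aztec packages may differ:

```ts
// Editor's sketch of a branded-number pattern; not the package's actual declaration.
type BlockNumber = number & { readonly __brand: 'BlockNumber' };

function BlockNumber(value: number): BlockNumber {
  if (!Number.isInteger(value) || value < 0) {
    throw new Error(`Invalid block number: ${value}`);
  }
  return value as BlockNumber;
}

// Call sites then read like the updated tests:
// await store.addContractInstances([instance], BlockNumber(1));
```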
@@ -560,7 +2052,7 @@ export function describeArchiverDataStore(
560
2052
  originalContractClassId: classId,
561
2053
  });
562
2054
  contractInstance = { ...randomInstance, address: await AztecAddress.random() };
563
- await store.addContractInstances([contractInstance], 1);
2055
+ await store.addContractInstances([contractInstance], BlockNumber(1));
564
2056
  await store.addContractInstanceUpdates(
565
2057
  [
566
2058
  {
@@ -602,7 +2094,7 @@ export function describeArchiverDataStore(
602
2094
  ...randomInstance,
603
2095
  address: await AztecAddress.random(),
604
2096
  };
605
- await store.addContractInstances([otherContractInstance], 1);
2097
+ await store.addContractInstances([otherContractInstance], BlockNumber(1));
606
2098
 
607
2099
  const fetchedInstance = await store.getContractInstance(otherContractInstance.address, timestampOfChange + 1n);
608
2100
  expect(fetchedInstance?.originalContractClassId).toEqual(otherClassId);
@@ -620,7 +2112,7 @@ export function describeArchiverDataStore(
620
2112
  ...randomInstance,
621
2113
  address: await AztecAddress.random(),
622
2114
  };
623
- await store.addContractInstances([otherContractInstance], 1);
2115
+ await store.addContractInstances([otherContractInstance], BlockNumber(1));
624
2116
  await store.addContractInstanceUpdates(
625
2117
  [
626
2118
  {
@@ -648,7 +2140,7 @@ export function describeArchiverDataStore(
648
2140
  await store.addContractClasses(
649
2141
  [contractClass],
650
2142
  [await computePublicBytecodeCommitment(contractClass.packedBytecode)],
651
- blockNum,
2143
+ BlockNumber(blockNum),
652
2144
  );
653
2145
  });
654
2146
 
@@ -657,7 +2149,7 @@ export function describeArchiverDataStore(
657
2149
  });
658
2150
 
659
2151
  it('returns undefined if the initial deployed contract class was deleted', async () => {
660
- await store.deleteContractClasses([contractClass], blockNum);
2152
+ await store.deleteContractClasses([contractClass], BlockNumber(blockNum));
661
2153
  await expect(store.getContractClass(contractClass.id)).resolves.toBeUndefined();
662
2154
  });
663
2155
 
@@ -665,9 +2157,9 @@ export function describeArchiverDataStore(
665
2157
  await store.addContractClasses(
666
2158
  [contractClass],
667
2159
  [await computePublicBytecodeCommitment(contractClass.packedBytecode)],
668
- blockNum + 1,
2160
+ BlockNumber(blockNum + 1),
669
2161
  );
670
- await store.deleteContractClasses([contractClass], blockNum + 1);
2162
+ await store.deleteContractClasses([contractClass], BlockNumber(blockNum + 1));
671
2163
  await expect(store.getContractClass(contractClass.id)).resolves.toMatchObject(contractClass);
672
2164
  });
673
2165
 
@@ -706,154 +2198,269 @@ export function describeArchiverDataStore(
  });
  });

- describe('getLogsByTags', () => {
- const numBlocks = 3;
+ describe('getPrivateLogsByTags', () => {
+ const numBlocksForLogs = 3;
  const numTxsPerBlock = 4;
  const numPrivateLogsPerTx = 3;
- const numPublicLogsPerTx = 2;

- let blocks: PublishedL2Block[];
+ let logsCheckpoints: PublishedCheckpoint[];

- const makeTag = (blockNumber: number, txIndex: number, logIndex: number, isPublic = false) =>
- blockNumber === 1 && txIndex === 0 && logIndex === 0
- ? Fr.ZERO // Shared tag
- : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1));
+ const makePrivateLogTag = (blockNumber: number, txIndex: number, logIndex: number): SiloedTag =>
+ new SiloedTag(
+ blockNumber === 1 && txIndex === 0 && logIndex === 0
+ ? Fr.ZERO // Shared tag
+ : new Fr(blockNumber * 100 + txIndex * 10 + logIndex),
+ );

- const makePrivateLog = (tag: Fr) =>
+ const makePrivateLog = (tag: SiloedTag) =>
  PrivateLog.from({
- fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i => (!i ? tag : new Fr(tag.toNumber() + i))),
+ fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i =>
+ !i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)),
+ ),
  emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS,
  });

- const makePublicLog = (tag: Fr) =>
- PublicLog.from({
- contractAddress: AztecAddress.fromNumber(1),
- fields: makeTuple(PUBLIC_LOG_SIZE_IN_FIELDS, i => (!i ? tag : new Fr(tag.toNumber() + i))),
- emittedLength: PUBLIC_LOG_SIZE_IN_FIELDS,
- });
-
  const mockPrivateLogs = (blockNumber: number, txIndex: number) => {
  return times(numPrivateLogsPerTx, (logIndex: number) => {
- const tag = makeTag(blockNumber, txIndex, logIndex);
+ const tag = makePrivateLogTag(blockNumber, txIndex, logIndex);
  return makePrivateLog(tag);
  });
  };

- const mockPublicLogs = (blockNumber: number, txIndex: number) => {
- return times(numPublicLogsPerTx, (logIndex: number) => {
- const tag = makeTag(blockNumber, txIndex, logIndex, /* isPublic */ true);
- return makePublicLog(tag);
+ const mockCheckpointWithLogs = async (
+ blockNumber: number,
+ previousArchive?: AppendOnlyTreeSnapshot,
+ ): Promise<PublishedCheckpoint> => {
+ const block = await L2BlockNew.random(BlockNumber(blockNumber), {
+ checkpointNumber: CheckpointNumber(blockNumber),
+ indexWithinCheckpoint: 0,
+ state: makeStateForBlock(blockNumber, numTxsPerBlock),
+ ...(previousArchive ? { lastArchive: previousArchive } : {}),
  });
- };
-
- const mockBlockWithLogs = async (blockNumber: number): Promise<PublishedL2Block> => {
- const block = await L2Block.random(blockNumber);
- block.header.globalVariables.blockNumber = blockNumber;
+ block.header.globalVariables.blockNumber = BlockNumber(blockNumber);

  block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => {
  const txEffect = await TxEffect.random();
  txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
- txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
+ txEffect.publicLogs = []; // No public logs needed for private log tests
  return txEffect;
  });

- return {
- block: block,
- attestations: times(3, CommitteeAttestation.random),
- l1: {
- blockNumber: BigInt(blockNumber),
- blockHash: makeBlockHash(blockNumber),
- timestamp: BigInt(blockNumber),
- },
- };
+ const checkpoint = new Checkpoint(
+ AppendOnlyTreeSnapshot.random(),
+ CheckpointHeader.random(),
+ [block],
+ CheckpointNumber(blockNumber),
+ );
+ return makePublishedCheckpoint(checkpoint, blockNumber);
  };

  beforeEach(async () => {
- blocks = await timesParallel(numBlocks, (index: number) => mockBlockWithLogs(index + 1));
+ // Create checkpoints sequentially to chain archive roots
+ logsCheckpoints = [];
+ for (let i = 0; i < numBlocksForLogs; i++) {
+ const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
+ logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
+ }

- await store.addBlocks(blocks);
- await store.addLogs(blocks.map(b => b.block));
+ await store.addCheckpoints(logsCheckpoints);
+ await store.addLogs(logsCheckpoints.flatMap(p => p.checkpoint.blocks));
  });

  it('is possible to batch request private logs via tags', async () => {
- const tags = [makeTag(2, 1, 2), makeTag(1, 2, 0)];
+ const tags = [makePrivateLogTag(2, 1, 2), makePrivateLogTag(1, 2, 0)];

- const logsByTags = await store.getLogsByTags(tags);
+ const logsByTags = await store.getPrivateLogsByTags(tags);

  expect(logsByTags).toEqual([
  [
  expect.objectContaining({
  blockNumber: 2,
- log: makePrivateLog(tags[0]),
- isFromPublic: false,
+ logData: makePrivateLog(tags[0]).getEmittedFields(),
  }),
  ],
  [
  expect.objectContaining({
  blockNumber: 1,
- log: makePrivateLog(tags[1]),
- isFromPublic: false,
+ logData: makePrivateLog(tags[1]).getEmittedFields(),
+ }),
+ ],
+ ]);
+ });
+
+ it('is possible to batch request logs that have the same tag but different content', async () => {
+ const tags = [makePrivateLogTag(1, 2, 1)];
+
+ // Create a checkpoint containing logs that have the same tag as the checkpoints before.
+ // Chain from the last checkpoint's archive
+ const newBlockNumber = numBlocksForLogs + 1;
+ const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
+ const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
+ const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1];
+ newLog.fields[0] = tags[0].value;
+ newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1] = newLog;
+ await store.addCheckpoints([newCheckpoint]);
+ await store.addLogs([newCheckpoint.checkpoint.blocks[0]]);
+
+ const logsByTags = await store.getPrivateLogsByTags(tags);
+
+ expect(logsByTags).toEqual([
+ [
+ expect.objectContaining({
+ blockNumber: 1,
+ logData: makePrivateLog(tags[0]).getEmittedFields(),
+ }),
+ expect.objectContaining({
+ blockNumber: newBlockNumber,
+ logData: newLog.getEmittedFields(),
  }),
  ],
  ]);
  });

- it('is possible to batch request all logs (private and public) via tags', async () => {
- // Tag(1, 0, 0) is shared with the first private log and the first public log.
- const tags = [makeTag(1, 0, 0)];
+ it('is possible to request logs for non-existing tags and determine their position', async () => {
+ const tags = [makePrivateLogTag(99, 88, 77), makePrivateLogTag(1, 1, 1)];

- const logsByTags = await store.getLogsByTags(tags);
+ const logsByTags = await store.getPrivateLogsByTags(tags);

  expect(logsByTags).toEqual([
+ [
+ // No logs for the first tag.
+ ],
  [
  expect.objectContaining({
  blockNumber: 1,
- log: makePrivateLog(tags[0]),
- isFromPublic: false,
+ logData: makePrivateLog(tags[1]).getEmittedFields(),
+ }),
+ ],
+ ]);
+ });
+ });
+
+ describe('getPublicLogsByTagsFromContract', () => {
+ const numBlocksForLogs = 3;
+ const numTxsPerBlock = 4;
+ const numPublicLogsPerTx = 2;
+ const contractAddress = AztecAddress.fromNumber(543254);
+
+ let logsCheckpoints: PublishedCheckpoint[];
+
+ const makePublicLogTag = (blockNumber: number, txIndex: number, logIndex: number): Tag =>
+ new Tag(
+ blockNumber === 1 && txIndex === 0 && logIndex === 0
+ ? Fr.ZERO // Shared tag
+ : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123),
+ );
+
+ const makePublicLog = (tag: Tag) =>
+ PublicLog.from({
+ contractAddress: contractAddress,
+ // Arbitrary length
+ fields: new Array(10).fill(null).map((_, i) => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))),
+ });
+
+ const mockPublicLogs = (blockNumber: number, txIndex: number) => {
+ return times(numPublicLogsPerTx, (logIndex: number) => {
+ const tag = makePublicLogTag(blockNumber, txIndex, logIndex);
+ return makePublicLog(tag);
+ });
+ };
+
+ const mockCheckpointWithLogs = async (
+ blockNumber: number,
+ previousArchive?: AppendOnlyTreeSnapshot,
+ ): Promise<PublishedCheckpoint> => {
+ const block = await L2BlockNew.random(BlockNumber(blockNumber), {
+ checkpointNumber: CheckpointNumber(blockNumber),
+ indexWithinCheckpoint: 0,
+ state: makeStateForBlock(blockNumber, numTxsPerBlock),
+ ...(previousArchive ? { lastArchive: previousArchive } : {}),
+ });
+ block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
+
+ block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => {
+ const txEffect = await TxEffect.random();
+ txEffect.privateLogs = []; // No private logs needed for public log tests
+ txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
+ return txEffect;
+ });
+
+ const checkpoint = new Checkpoint(
+ AppendOnlyTreeSnapshot.random(),
+ CheckpointHeader.random(),
+ [block],
+ CheckpointNumber(blockNumber),
+ );
+ return makePublishedCheckpoint(checkpoint, blockNumber);
+ };
+
+ beforeEach(async () => {
+ // Create checkpoints sequentially to chain archive roots
+ logsCheckpoints = [];
+ for (let i = 0; i < numBlocksForLogs; i++) {
+ const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
+ logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
+ }
+
+ await store.addCheckpoints(logsCheckpoints);
+ await store.addLogs(logsCheckpoints.flatMap(p => p.checkpoint.blocks));
+ });
+
+ it('is possible to batch request public logs via tags', async () => {
+ const tags = [makePublicLogTag(2, 1, 1), makePublicLogTag(1, 2, 0)];
+
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
+
+ expect(logsByTags).toEqual([
+ [
+ expect.objectContaining({
+ blockNumber: 2,
+ logData: makePublicLog(tags[0]).getEmittedFields(),
  }),
+ ],
+ [
  expect.objectContaining({
  blockNumber: 1,
- log: makePublicLog(tags[0]),
- isFromPublic: true,
+ logData: makePublicLog(tags[1]).getEmittedFields(),
  }),
  ],
  ]);
  });

  it('is possible to batch request logs that have the same tag but different content', async () => {
- const tags = [makeTag(1, 2, 1)];
+ const tags = [makePublicLogTag(1, 2, 1)];

- // Create a block containing logs that have the same tag as the blocks before.
- const newBlockNumber = numBlocks;
- const newBlock = await mockBlockWithLogs(newBlockNumber);
- const newLog = newBlock.block.body.txEffects[1].privateLogs[1];
- newLog.fields[0] = tags[0];
- newBlock.block.body.txEffects[1].privateLogs[1] = newLog;
- await store.addBlocks([newBlock]);
- await store.addLogs([newBlock.block]);
+ // Create a checkpoint containing logs that have the same tag as the checkpoints before.
+ // Chain from the last checkpoint's archive
+ const newBlockNumber = numBlocksForLogs + 1;
+ const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
+ const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
+ const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1];
+ newLog.fields[0] = tags[0].value;
+ newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1] = newLog;
+ await store.addCheckpoints([newCheckpoint]);
+ await store.addLogs([newCheckpoint.checkpoint.blocks[0]]);

- const logsByTags = await store.getLogsByTags(tags);
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);

  expect(logsByTags).toEqual([
  [
  expect.objectContaining({
  blockNumber: 1,
- log: makePrivateLog(tags[0]),
- isFromPublic: false,
+ logData: makePublicLog(tags[0]).getEmittedFields(),
  }),
  expect.objectContaining({
  blockNumber: newBlockNumber,
- log: newLog,
- isFromPublic: false,
+ logData: newLog.getEmittedFields(),
  }),
  ],
  ]);
  });

  it('is possible to request logs for non-existing tags and determine their position', async () => {
- const tags = [makeTag(99, 88, 77), makeTag(1, 1, 1)];
+ const tags = [makePublicLogTag(99, 88, 77), makePublicLogTag(1, 1, 0)];

- const logsByTags = await store.getLogsByTags(tags);
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);

  expect(logsByTags).toEqual([
  [
@@ -862,8 +2469,7 @@ export function describeArchiverDataStore(
  [
  expect.objectContaining({
  blockNumber: 1,
- log: makePrivateLog(tags[1]),
- isFromPublic: false,
+ logData: makePublicLog(tags[1]).getEmittedFields(),
  }),
  ],
  ]);
@@ -871,32 +2477,33 @@ export function describeArchiverDataStore(
  });

  describe('getPublicLogs', () => {
- const txsPerBlock = 4;
- const numPublicFunctionCalls = 3;
- const numPublicLogs = 2;
- const numBlocks = 10;
- let blocks: PublishedL2Block[];
+ const numBlocksForPublicLogs = 10;

- beforeEach(async () => {
- blocks = await timesParallel(numBlocks, async (index: number) => ({
- block: await L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numPublicLogs),
- l1: { blockNumber: BigInt(index), blockHash: makeBlockHash(index), timestamp: BigInt(index) },
- attestations: times(3, CommitteeAttestation.random),
- }));
+ // Helper to get total public logs per tx from a block
+ const getPublicLogsPerTx = (block: L2BlockNew, txIndex: number) =>
+ block.body.txEffects[txIndex].publicLogs.length;
+
+ // Helper to get number of txs in a block
+ const getTxsPerBlock = (block: L2BlockNew) => block.body.txEffects.length;

- await store.addBlocks(blocks);
- await store.addLogs(blocks.map(b => b.block));
+ beforeEach(async () => {
+ // Use the outer publishedCheckpoints for log tests
+ for (let i = 0; i < numBlocksForPublicLogs; i++) {
+ await store.addCheckpoints([publishedCheckpoints[i]]);
+ await store.addLogs(publishedCheckpoints[i].checkpoint.blocks);
+ }
  });

  it('no logs returned if deleted ("txHash" filter param is respected variant)', async () => {
  // get random tx
- const targetBlockIndex = randomInt(numBlocks);
- const targetTxIndex = randomInt(txsPerBlock);
- const targetTxHash = blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].txHash;
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
+ const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;

  await Promise.all([
- store.unwindBlocks(blocks.length, blocks.length),
- store.deleteLogs(blocks.map(b => b.block)),
+ store.unwindCheckpoints(CheckpointNumber(numBlocksForPublicLogs), numBlocksForPublicLogs),
+ store.deleteLogs(publishedCheckpoints.slice(0, numBlocksForPublicLogs).flatMap(b => b.checkpoint.blocks)),
  ]);

  const response = await store.getPublicLogs({ txHash: targetTxHash });
@@ -908,16 +2515,17 @@ export function describeArchiverDataStore(

  it('"txHash" filter param is respected', async () => {
  // get random tx
- const targetBlockIndex = randomInt(numBlocks);
- const targetTxIndex = randomInt(txsPerBlock);
- const targetTxHash = blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].txHash;
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
+ const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;

  const response = await store.getPublicLogs({ txHash: targetTxHash });
  const logs = response.logs;

  expect(response.maxLogsHit).toBeFalsy();

- const expectedNumLogs = numPublicFunctionCalls * numPublicLogs;
+ const expectedNumLogs = getPublicLogsPerTx(targetBlock, targetTxIndex);
  expect(logs.length).toEqual(expectedNumLogs);

  const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM;
@@ -927,6 +2535,17 @@ export function describeArchiverDataStore(
  }
  });

+ it('returns block hash on public log ids', async () => {
+ const targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
+ const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
+
+ const logs = (await store.getPublicLogs({ fromBlock: targetBlock.number, toBlock: targetBlock.number + 1 }))
+ .logs;
+
+ expect(logs.length).toBeGreaterThan(0);
+ expect(logs.every(log => log.id.blockHash.equals(expectedBlockHash))).toBe(true);
+ });
+
  it('"fromBlock" and "toBlock" filter params are respected', async () => {
  // Set "fromBlock" and "toBlock"
  const fromBlock = 3;
@@ -937,7 +2556,12 @@ export function describeArchiverDataStore(

  expect(response.maxLogsHit).toBeFalsy();

- const expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numPublicLogs * (toBlock - fromBlock);
+ // Compute expected logs from the blocks in range
+ let expectedNumLogs = 0;
+ for (let i = fromBlock - 1; i < toBlock - 1; i++) {
+ const block = publishedCheckpoints[i].checkpoint.blocks[0];
+ expectedNumLogs += block.body.txEffects.reduce((sum, tx) => sum + tx.publicLogs.length, 0);
+ }
  expect(logs.length).toEqual(expectedNumLogs);

  for (const log of logs) {
@@ -949,11 +2573,12 @@ export function describeArchiverDataStore(

  it('"contractAddress" filter param is respected', async () => {
  // Get a random contract address from the logs
- const targetBlockIndex = randomInt(numBlocks);
- const targetTxIndex = randomInt(txsPerBlock);
- const targetLogIndex = randomInt(numPublicLogs * numPublicFunctionCalls);
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
+ const targetLogIndex = randomInt(getPublicLogsPerTx(targetBlock, targetTxIndex));
  const targetContractAddress =
- blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
+ targetBlock.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;

  const response = await store.getPublicLogs({ contractAddress: targetContractAddress });

@@ -966,11 +2591,19 @@ export function describeArchiverDataStore(

  it('"afterLog" filter param is respected', async () => {
  // Get a random log as reference
- const targetBlockIndex = randomInt(numBlocks);
- const targetTxIndex = randomInt(txsPerBlock);
- const targetLogIndex = randomInt(numPublicLogs);
-
- const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
+ const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
+ const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
+ const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
+
+ const afterLog = new LogId(
+ BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM),
+ targetBlockHash,
+ targetTxIndex,
+ targetLogIndex,
+ );

  const response = await store.getPublicLogs({ afterLog });
  const logs = response.logs;
@@ -992,52 +2625,77 @@ export function describeArchiverDataStore(
  it('"txHash" filter param is ignored when "afterLog" is set', async () => {
  // Get random txHash
  const txHash = TxHash.random();
- const afterLog = new LogId(1, 0, 0);
+ const afterLog = new LogId(BlockNumber(1), L2BlockHash.random(), 0, 0);

  const response = await store.getPublicLogs({ txHash, afterLog });
  expect(response.logs.length).toBeGreaterThan(1);
  });

  it('intersecting works', async () => {
- let logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: -5 })).logs;
+ let logs = (await store.getPublicLogs({ fromBlock: -10 as BlockNumber, toBlock: -5 as BlockNumber })).logs;
  expect(logs.length).toBe(0);

  // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive
- logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: 5 })).logs;
+ logs = (await store.getPublicLogs({ fromBlock: -10 as BlockNumber, toBlock: BlockNumber(5) })).logs;
  let blockNumbers = new Set(logs.map(log => log.id.blockNumber));
  expect(blockNumbers).toEqual(new Set([1, 2, 3, 4]));

  // "toBlock" should be exclusive
- logs = (await store.getPublicLogs({ fromBlock: 1, toBlock: 1 })).logs;
+ logs = (await store.getPublicLogs({ fromBlock: BlockNumber(1), toBlock: BlockNumber(1) })).logs;
  expect(logs.length).toBe(0);

- logs = (await store.getPublicLogs({ fromBlock: 10, toBlock: 5 })).logs;
+ logs = (await store.getPublicLogs({ fromBlock: BlockNumber(10), toBlock: BlockNumber(5) })).logs;
  expect(logs.length).toBe(0);

  // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned
- logs = (await store.getPublicLogs({ fromBlock: -100, toBlock: +100 })).logs;
+ logs = (await store.getPublicLogs({ fromBlock: -100 as BlockNumber, toBlock: +100 })).logs;
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
- expect(blockNumbers.size).toBe(numBlocks);
+ expect(blockNumbers.size).toBe(numBlocksForPublicLogs);

  // intersecting with "afterLog" works
- logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs;
+ logs = (
+ await store.getPublicLogs({
+ fromBlock: BlockNumber(2),
+ toBlock: BlockNumber(5),
+ afterLog: new LogId(BlockNumber(4), L2BlockHash.random(), 0, 0),
+ })
+ ).logs;
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
  expect(blockNumbers).toEqual(new Set([4]));

- logs = (await store.getPublicLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs;
+ logs = (
+ await store.getPublicLogs({
+ toBlock: BlockNumber(5),
+ afterLog: new LogId(BlockNumber(5), L2BlockHash.random(), 1, 0),
+ })
+ ).logs;
  expect(logs.length).toBe(0);

- logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })).logs;
+ logs = (
+ await store.getPublicLogs({
+ fromBlock: BlockNumber(2),
+ toBlock: BlockNumber(5),
+ afterLog: new LogId(BlockNumber(100), L2BlockHash.random(), 0, 0),
+ })
+ ).logs;
  expect(logs.length).toBe(0);
  });

  it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async () => {
  // Get a random log as reference
- const targetBlockIndex = randomInt(numBlocks);
- const targetTxIndex = randomInt(txsPerBlock);
- const targetLogIndex = randomInt(numPublicLogs);
-
- const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
+ const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
+ const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
+ const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
+
+ const afterLog = new LogId(
+ BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM),
+ targetBlockHash,
+ targetTxIndex,
+ targetLogIndex,
+ );

  const response = await store.getPublicLogs({ afterLog, fromBlock: afterLog.blockNumber });
  const logs = response.logs;
@@ -1056,5 +2714,130 @@ export function describeArchiverDataStore(
  }
  });
  });
+
+ describe('getContractClassLogs', () => {
+ let targetBlock: L2BlockNew;
+ let expectedContractClassLog: ContractClassLog;
+
+ beforeEach(async () => {
+ await store.addCheckpoints(publishedCheckpoints);
+
+ targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
+ expectedContractClassLog = await ContractClassLog.random();
+ targetBlock.body.txEffects.forEach((txEffect, index) => {
+ txEffect.contractClassLogs = index === 0 ? [expectedContractClassLog] : [];
+ });
+
+ await store.addLogs([targetBlock]);
+ });
+
+ it('returns block hash on contract class log ids', async () => {
+ const result = await store.getContractClassLogs({
+ fromBlock: targetBlock.number,
+ toBlock: targetBlock.number + 1,
+ });
+
+ expect(result.maxLogsHit).toBeFalsy();
+ expect(result.logs).toHaveLength(1);
+
+ const [{ id, log }] = result.logs;
+ const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
+
+ expect(id.blockHash.equals(expectedBlockHash)).toBe(true);
+ expect(id.blockNumber).toEqual(targetBlock.number);
+ expect(log).toEqual(expectedContractClassLog);
+ });
+ });
+
+ describe('pendingChainValidationStatus', () => {
+ it('should return undefined when no status is set', async () => {
+ const status = await store.getPendingChainValidationStatus();
+ expect(status).toBeUndefined();
+ });
+
+ it('should store and retrieve a valid validation status', async () => {
+ const validStatus: ValidateBlockResult = { valid: true };
+
+ await store.setPendingChainValidationStatus(validStatus);
+ const retrievedStatus = await store.getPendingChainValidationStatus();
+
+ expect(retrievedStatus).toEqual(validStatus);
+ });
+
+ it('should store and retrieve an invalid validation status with insufficient attestations', async () => {
+ const invalidStatus: ValidateBlockResult = {
+ valid: false,
+ block: randomBlockInfo(1),
+ committee: [EthAddress.random(), EthAddress.random()],
+ epoch: EpochNumber(123),
+ seed: 456n,
+ attestors: [EthAddress.random()],
+ attestations: [CommitteeAttestation.random()],
+ reason: 'insufficient-attestations',
+ };
+
+ await store.setPendingChainValidationStatus(invalidStatus);
+ const retrievedStatus = await store.getPendingChainValidationStatus();
+
+ expect(retrievedStatus).toEqual(invalidStatus);
+ });
+
+ it('should store and retrieve an invalid validation status with invalid attestation', async () => {
+ const invalidStatus: ValidateBlockResult = {
+ valid: false,
+ block: randomBlockInfo(2),
+ committee: [EthAddress.random()],
+ attestors: [EthAddress.random()],
+ epoch: EpochNumber(789),
+ seed: 101n,
+ attestations: [CommitteeAttestation.random()],
+ reason: 'invalid-attestation',
+ invalidIndex: 5,
+ };
+
+ await store.setPendingChainValidationStatus(invalidStatus);
+ const retrievedStatus = await store.getPendingChainValidationStatus();
+
+ expect(retrievedStatus).toEqual(invalidStatus);
+ });
+
+ it('should overwrite existing status when setting a new one', async () => {
+ const firstStatus: ValidateBlockResult = { valid: true };
+ const secondStatus: ValidateBlockResult = {
+ valid: false,
+ block: randomBlockInfo(3),
+ committee: [EthAddress.random()],
+ epoch: EpochNumber(999),
+ seed: 888n,
+ attestors: [EthAddress.random()],
+ attestations: [CommitteeAttestation.random()],
+ reason: 'insufficient-attestations',
+ };
+
+ await store.setPendingChainValidationStatus(firstStatus);
+ await store.setPendingChainValidationStatus(secondStatus);
+ const retrievedStatus = await store.getPendingChainValidationStatus();
+
+ expect(retrievedStatus).toEqual(secondStatus);
+ });
+
+ it('should handle empty committee and attestations arrays', async () => {
+ const statusWithEmptyArrays: ValidateBlockResult = {
+ valid: false,
+ block: randomBlockInfo(4),
+ committee: [],
+ epoch: EpochNumber(0),
+ seed: 0n,
+ attestors: [],
+ attestations: [],
+ reason: 'insufficient-attestations',
+ };
+
+ await store.setPendingChainValidationStatus(statusWithEmptyArrays);
+ const retrievedStatus = await store.getPendingChainValidationStatus();
+
+ expect(retrievedStatus).toEqual(statusWithEmptyArrays);
+ });
+ });
  });
 }