@aztec/archiver 0.0.1-commit.9593d84 → 0.0.1-commit.96bb3f7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. package/README.md +14 -14
  2. package/dest/archiver/archiver.d.ts +105 -85
  3. package/dest/archiver/archiver.d.ts.map +1 -1
  4. package/dest/archiver/archiver.js +949 -255
  5. package/dest/archiver/archiver_store.d.ts +109 -49
  6. package/dest/archiver/archiver_store.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  8. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  9. package/dest/archiver/archiver_store_test_suite.js +1871 -390
  10. package/dest/archiver/config.d.ts +5 -4
  11. package/dest/archiver/config.d.ts.map +1 -1
  12. package/dest/archiver/config.js +12 -5
  13. package/dest/archiver/errors.d.ts +25 -1
  14. package/dest/archiver/errors.d.ts.map +1 -1
  15. package/dest/archiver/errors.js +37 -0
  16. package/dest/archiver/index.d.ts +2 -2
  17. package/dest/archiver/index.d.ts.map +1 -1
  18. package/dest/archiver/instrumentation.d.ts +3 -1
  19. package/dest/archiver/instrumentation.d.ts.map +1 -1
  20. package/dest/archiver/instrumentation.js +22 -59
  21. package/dest/archiver/kv_archiver_store/block_store.d.ts +60 -20
  22. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/block_store.js +345 -89
  24. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
  25. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  26. package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
  27. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
  28. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  29. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +49 -58
  30. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  31. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +68 -48
  32. package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
  33. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  34. package/dest/archiver/kv_archiver_store/log_store.js +149 -84
  35. package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
  36. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  37. package/dest/archiver/kv_archiver_store/message_store.js +15 -14
  38. package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
  39. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
  40. package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
  41. package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
  42. package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
  43. package/dest/archiver/l1/calldata_retriever.js +471 -0
  44. package/dest/archiver/l1/data_retrieval.d.ts +90 -0
  45. package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
  46. package/dest/archiver/{data_retrieval.js → l1/data_retrieval.js} +52 -107
  47. package/dest/archiver/l1/debug_tx.d.ts +19 -0
  48. package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
  49. package/dest/archiver/l1/debug_tx.js +73 -0
  50. package/dest/archiver/l1/spire_proposer.d.ts +70 -0
  51. package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
  52. package/dest/archiver/l1/spire_proposer.js +157 -0
  53. package/dest/archiver/l1/trace_tx.d.ts +97 -0
  54. package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
  55. package/dest/archiver/l1/trace_tx.js +91 -0
  56. package/dest/archiver/l1/types.d.ts +12 -0
  57. package/dest/archiver/l1/types.d.ts.map +1 -0
  58. package/dest/archiver/l1/types.js +3 -0
  59. package/dest/archiver/l1/validate_trace.d.ts +29 -0
  60. package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
  61. package/dest/archiver/l1/validate_trace.js +150 -0
  62. package/dest/archiver/structs/inbox_message.d.ts +4 -4
  63. package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
  64. package/dest/archiver/structs/inbox_message.js +6 -5
  65. package/dest/archiver/structs/published.d.ts +1 -2
  66. package/dest/archiver/structs/published.d.ts.map +1 -1
  67. package/dest/archiver/validation.d.ts +4 -4
  68. package/dest/archiver/validation.d.ts.map +1 -1
  69. package/dest/archiver/validation.js +1 -1
  70. package/dest/factory.d.ts +2 -2
  71. package/dest/factory.d.ts.map +1 -1
  72. package/dest/factory.js +4 -3
  73. package/dest/index.d.ts +2 -2
  74. package/dest/index.d.ts.map +1 -1
  75. package/dest/index.js +1 -1
  76. package/dest/test/mock_archiver.d.ts +16 -8
  77. package/dest/test/mock_archiver.d.ts.map +1 -1
  78. package/dest/test/mock_archiver.js +19 -14
  79. package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
  80. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  81. package/dest/test/mock_l1_to_l2_message_source.js +21 -11
  82. package/dest/test/mock_l2_block_source.d.ts +23 -8
  83. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  84. package/dest/test/mock_l2_block_source.js +95 -35
  85. package/dest/test/mock_structs.d.ts +3 -2
  86. package/dest/test/mock_structs.d.ts.map +1 -1
  87. package/dest/test/mock_structs.js +9 -8
  88. package/package.json +17 -16
  89. package/src/archiver/archiver.ts +751 -344
  90. package/src/archiver/archiver_store.ts +123 -48
  91. package/src/archiver/archiver_store_test_suite.ts +1943 -365
  92. package/src/archiver/config.ts +17 -12
  93. package/src/archiver/errors.ts +64 -0
  94. package/src/archiver/index.ts +1 -1
  95. package/src/archiver/instrumentation.ts +24 -59
  96. package/src/archiver/kv_archiver_store/block_store.ts +475 -106
  97. package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
  98. package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
  99. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +92 -72
  100. package/src/archiver/kv_archiver_store/log_store.ts +209 -99
  101. package/src/archiver/kv_archiver_store/message_store.ts +21 -18
  102. package/src/archiver/l1/README.md +98 -0
  103. package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
  104. package/src/archiver/l1/calldata_retriever.ts +641 -0
  105. package/src/archiver/{data_retrieval.ts → l1/data_retrieval.ts} +101 -165
  106. package/src/archiver/l1/debug_tx.ts +99 -0
  107. package/src/archiver/l1/spire_proposer.ts +160 -0
  108. package/src/archiver/l1/trace_tx.ts +128 -0
  109. package/src/archiver/l1/types.ts +13 -0
  110. package/src/archiver/l1/validate_trace.ts +211 -0
  111. package/src/archiver/structs/inbox_message.ts +8 -8
  112. package/src/archiver/structs/published.ts +0 -1
  113. package/src/archiver/validation.ts +6 -6
  114. package/src/factory.ts +4 -3
  115. package/src/index.ts +1 -1
  116. package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
  117. package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
  118. package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
  119. package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
  120. package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
  121. package/src/test/fixtures/trace_transaction-proxied.json +128 -0
  122. package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
  123. package/src/test/mock_archiver.ts +22 -16
  124. package/src/test/mock_l1_to_l2_message_source.ts +18 -11
  125. package/src/test/mock_l2_block_source.ts +115 -37
  126. package/src/test/mock_structs.ts +10 -9
  127. package/dest/archiver/data_retrieval.d.ts +0 -79
  128. package/dest/archiver/data_retrieval.d.ts.map +0 -1
@@ -1,46 +1,57 @@
1
1
  import {
2
+ INITIAL_CHECKPOINT_NUMBER,
2
3
  INITIAL_L2_BLOCK_NUM,
4
+ MAX_NOTE_HASHES_PER_TX,
3
5
  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
4
6
  PRIVATE_LOG_SIZE_IN_FIELDS,
5
7
  } from '@aztec/constants';
6
8
  import { makeTuple } from '@aztec/foundation/array';
7
- import { EpochNumber } from '@aztec/foundation/branded-types';
9
+ import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
8
10
  import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
9
11
  import { times, timesParallel } from '@aztec/foundation/collection';
10
- import { randomInt } from '@aztec/foundation/crypto';
11
- import { Fr } from '@aztec/foundation/fields';
12
+ import { randomInt } from '@aztec/foundation/crypto/random';
13
+ import { Fr } from '@aztec/foundation/curves/bn254';
12
14
  import { toArray } from '@aztec/foundation/iterable';
13
15
  import { sleep } from '@aztec/foundation/sleep';
14
16
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
15
17
  import {
18
+ CheckpointedL2Block,
16
19
  CommitteeAttestation,
17
20
  EthAddress,
18
- L2Block,
19
21
  L2BlockHash,
20
- PublishedL2Block,
21
- type ValidateBlockResult,
22
- randomBlockInfo,
23
- wrapInBlock,
22
+ L2BlockNew,
23
+ type ValidateCheckpointResult,
24
24
  } from '@aztec/stdlib/block';
25
+ import { Checkpoint, L1PublishedData, PublishedCheckpoint, randomCheckpointInfo } from '@aztec/stdlib/checkpoint';
25
26
  import {
26
27
  type ContractClassPublic,
27
28
  type ContractInstanceWithAddress,
28
29
  SerializableContractInstance,
29
30
  computePublicBytecodeCommitment,
30
31
  } from '@aztec/stdlib/contract';
31
- import { LogId, PrivateLog, PublicLog } from '@aztec/stdlib/logs';
32
+ import { ContractClassLog, LogId, PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs';
32
33
  import { InboxLeaf } from '@aztec/stdlib/messaging';
34
+ import { CheckpointHeader } from '@aztec/stdlib/rollup';
33
35
  import {
34
36
  makeContractClassPublic,
35
37
  makeExecutablePrivateFunctionWithMembershipProof,
36
38
  makeUtilityFunctionWithMembershipProof,
37
39
  } from '@aztec/stdlib/testing';
38
40
  import '@aztec/stdlib/testing/jest';
39
- import { type IndexedTxEffect, TxEffect, TxHash } from '@aztec/stdlib/tx';
41
+ import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
42
+ import { type IndexedTxEffect, PartialStateReference, StateReference, TxEffect, TxHash } from '@aztec/stdlib/tx';
40
43
 
41
44
  import { makeInboxMessage, makeInboxMessages } from '../test/mock_structs.js';
42
45
  import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
43
- import { BlockNumberNotSequentialError, InitialBlockNumberNotSequentialError } from './errors.js';
46
+ import {
47
+ BlockArchiveNotConsistentError,
48
+ BlockIndexNotSequentialError,
49
+ BlockNumberNotSequentialError,
50
+ CheckpointNumberNotConsistentError,
51
+ CheckpointNumberNotSequentialError,
52
+ InitialBlockNumberNotSequentialError,
53
+ InitialCheckpointNumberNotSequentialError,
54
+ } from './errors.js';
44
55
  import { MessageStoreError } from './kv_archiver_store/message_store.js';
45
56
  import type { InboxMessage } from './structs/inbox_message.js';
46
57
 
@@ -54,206 +65,1572 @@ export function describeArchiverDataStore(
54
65
  ) {
55
66
  describe(testName, () => {
56
67
  let store: ArchiverDataStore;
57
- let blocks: PublishedL2Block[];
58
-
59
- const blockTests: [number, number, () => PublishedL2Block[]][] = [
60
- [1, 1, () => blocks.slice(0, 1)],
61
- [10, 1, () => blocks.slice(9, 10)],
62
- [1, 10, () => blocks.slice(0, 10)],
63
- [2, 5, () => blocks.slice(1, 6)],
64
- [5, 2, () => blocks.slice(4, 6)],
68
+ let publishedCheckpoints: PublishedCheckpoint[];
69
+
70
+ const blockNumberTests: [number, () => L2BlockNew][] = [
71
+ [1, () => publishedCheckpoints[0].checkpoint.blocks[0]],
72
+ [10, () => publishedCheckpoints[9].checkpoint.blocks[0]],
73
+ [5, () => publishedCheckpoints[4].checkpoint.blocks[0]],
65
74
  ];
66
75
 
67
76
  const makeBlockHash = (blockNumber: number) => `0x${blockNumber.toString(16).padStart(64, '0')}`;
68
77
 
69
- const makePublished = (block: L2Block, l1BlockNumber: number): PublishedL2Block =>
70
- PublishedL2Block.fromFields({
71
- block: block,
72
- l1: {
73
- blockNumber: BigInt(l1BlockNumber),
74
- blockHash: makeBlockHash(l1BlockNumber),
75
- timestamp: BigInt(l1BlockNumber * 1000),
76
- },
77
- attestations: times(3, CommitteeAttestation.random),
78
- });
79
-
80
- const expectBlocksEqual = (actual: PublishedL2Block[], expected: PublishedL2Block[]) => {
81
- expect(actual.length).toEqual(expected.length);
82
- for (let i = 0; i < expected.length; i++) {
83
- const expectedBlock = expected[i];
84
- const actualBlock = actual[i];
85
- expect(actualBlock.l1).toEqual(expectedBlock.l1);
86
- expect(actualBlock.block.equals(expectedBlock.block)).toBe(true);
87
- expect(actualBlock.attestations.every((a, i) => a.equals(expectedBlock.attestations[i]))).toBe(true);
88
- }
78
+ // Create a state reference with properly calculated noteHashTree.nextAvailableLeafIndex
79
+ // This is needed because the log store calculates dataStartIndexForBlock as:
80
+ // noteHashTree.nextAvailableLeafIndex - txEffects.length * MAX_NOTE_HASHES_PER_TX
81
+ // If nextAvailableLeafIndex is too small (random values 0-1000), this becomes negative
82
+ const makeStateForBlock = (blockNumber: number, txsPerBlock: number): StateReference => {
83
+ // Ensure nextAvailableLeafIndex is large enough for all blocks up to this point
84
+ const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX;
85
+ return new StateReference(
86
+ AppendOnlyTreeSnapshot.random(),
87
+ new PartialStateReference(
88
+ new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex),
89
+ AppendOnlyTreeSnapshot.random(),
90
+ AppendOnlyTreeSnapshot.random(),
91
+ ),
92
+ );
93
+ };
94
+
95
+ const makePublishedCheckpoint = (checkpoint: Checkpoint, l1BlockNumber: number): PublishedCheckpoint => {
96
+ return new PublishedCheckpoint(
97
+ checkpoint,
98
+ new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)),
99
+ times(3, CommitteeAttestation.random),
100
+ );
101
+ };
102
+
103
+ const expectCheckpointedBlockEquals = (
104
+ actual: CheckpointedL2Block,
105
+ expectedBlock: L2BlockNew,
106
+ expectedCheckpoint: PublishedCheckpoint,
107
+ ) => {
108
+ expect(actual.l1).toEqual(expectedCheckpoint.l1);
109
+ expect(actual.block.header.equals(expectedBlock.header)).toBe(true);
110
+ expect(actual.checkpointNumber).toEqual(expectedCheckpoint.checkpoint.number);
111
+ expect(actual.attestations.every((a, i) => a.equals(expectedCheckpoint.attestations[i]))).toBe(true);
89
112
  };
90
113
 
91
114
  beforeEach(async () => {
92
115
  store = await getStore();
93
- blocks = await timesParallel(10, async i => makePublished(await L2Block.random(i + 1), i + 10));
116
+ // Create checkpoints sequentially to ensure archive roots are chained properly.
117
+ // Each block's header.lastArchive must equal the previous block's archive.
118
+ publishedCheckpoints = [];
119
+ const txsPerBlock = 4;
120
+ for (let i = 0; i < 10; i++) {
121
+ const blockNumber = i + 1;
122
+ const previousArchive = i > 0 ? publishedCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
123
+ const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
124
+ numBlocks: 1,
125
+ startBlockNumber: blockNumber,
126
+ previousArchive,
127
+ txsPerBlock,
128
+ state: makeStateForBlock(blockNumber, txsPerBlock),
129
+ // Ensure each tx has public logs for getPublicLogs tests
130
+ txOptions: { numPublicCallsPerTx: 2, numPublicLogsPerCall: 2 },
131
+ });
132
+ publishedCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
133
+ }
94
134
  });
95
135
 
96
- describe('addBlocks', () => {
97
- it('returns success when adding blocks', async () => {
98
- await expect(store.addBlocks(blocks)).resolves.toBe(true);
136
+ describe('addCheckpoints', () => {
137
+ it('returns success when adding checkpoints', async () => {
138
+ await expect(store.addCheckpoints(publishedCheckpoints)).resolves.toBe(true);
99
139
  });
100
140
 
101
- it('allows duplicate blocks', async () => {
102
- await store.addBlocks(blocks);
103
- await expect(store.addBlocks(blocks)).resolves.toBe(true);
141
+ it('throws on duplicate checkpoints', async () => {
142
+ await store.addCheckpoints(publishedCheckpoints);
143
+ await expect(store.addCheckpoints(publishedCheckpoints)).rejects.toThrow(
144
+ InitialCheckpointNumberNotSequentialError,
145
+ );
104
146
  });
105
147
 
106
148
  it('throws an error if the previous block does not exist in the store', async () => {
107
- const block = makePublished(await L2Block.random(2), 2);
108
- await expect(store.addBlocks([block])).rejects.toThrow(InitialBlockNumberNotSequentialError);
109
- await expect(store.getPublishedBlocks(1, 10)).resolves.toEqual([]);
149
+ const checkpoint = await Checkpoint.random(CheckpointNumber(2), { numBlocks: 1, startBlockNumber: 2 });
150
+ const block = makePublishedCheckpoint(checkpoint, 2);
151
+ await expect(store.addCheckpoints([block])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
152
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
110
153
  });
111
154
 
112
155
  it('throws an error if there is a gap in the blocks being added', async () => {
113
- const blocks = [makePublished(await L2Block.random(1), 1), makePublished(await L2Block.random(3), 3)];
114
- await expect(store.addBlocks(blocks)).rejects.toThrow(BlockNumberNotSequentialError);
115
- await expect(store.getPublishedBlocks(1, 10)).resolves.toEqual([]);
156
+ const checkpoint1 = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 });
157
+ const checkpoint3 = await Checkpoint.random(CheckpointNumber(3), { numBlocks: 1, startBlockNumber: 3 });
158
+ const checkpoints = [makePublishedCheckpoint(checkpoint1, 1), makePublishedCheckpoint(checkpoint3, 3)];
159
+ await expect(store.addCheckpoints(checkpoints)).rejects.toThrow(CheckpointNumberNotSequentialError);
160
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
161
+ });
162
+
163
+ it('throws an error if blocks within a checkpoint are not sequential', async () => {
164
+ // Create a checkpoint with non-sequential block numbers (block 1 and block 3, skipping block 2)
165
+ const block1 = await L2BlockNew.random(BlockNumber(1), { checkpointNumber: CheckpointNumber(1) });
166
+ const block3 = await L2BlockNew.random(BlockNumber(3), { checkpointNumber: CheckpointNumber(1) });
167
+
168
+ const checkpoint = new Checkpoint(
169
+ AppendOnlyTreeSnapshot.random(),
170
+ CheckpointHeader.random(),
171
+ [block1, block3],
172
+ CheckpointNumber(1),
173
+ );
174
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
175
+
176
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockNumberNotSequentialError);
177
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
178
+ });
179
+
180
+ it('throws an error if blocks within a checkpoint do not have sequential indexes', async () => {
181
+ // Create a checkpoint with non-sequential indexes
182
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
183
+ checkpointNumber: CheckpointNumber(1),
184
+ indexWithinCheckpoint: 0,
185
+ });
186
+ const block3 = await L2BlockNew.random(BlockNumber(2), {
187
+ checkpointNumber: CheckpointNumber(1),
188
+ indexWithinCheckpoint: 2,
189
+ });
190
+
191
+ const checkpoint = new Checkpoint(
192
+ AppendOnlyTreeSnapshot.random(),
193
+ CheckpointHeader.random(),
194
+ [block1, block3],
195
+ CheckpointNumber(1),
196
+ );
197
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
198
+
199
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
200
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
201
+ });
202
+
203
+ it('throws an error if blocks within a checkpoint do not start from index 0', async () => {
204
+ // Create a checkpoint whose block indexes do not start from 0
205
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
206
+ checkpointNumber: CheckpointNumber(1),
207
+ indexWithinCheckpoint: 1,
208
+ });
209
+ const block3 = await L2BlockNew.random(BlockNumber(2), {
210
+ checkpointNumber: CheckpointNumber(1),
211
+ indexWithinCheckpoint: 2,
212
+ });
213
+
214
+ const checkpoint = new Checkpoint(
215
+ AppendOnlyTreeSnapshot.random(),
216
+ CheckpointHeader.random(),
217
+ [block1, block3],
218
+ CheckpointNumber(1),
219
+ );
220
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
221
+
222
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
223
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
224
+ });
225
+
226
+ it('throws an error if block has invalid checkpoint index', async () => {
227
+ // Create a block with an invalid checkpoint index
228
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
229
+ checkpointNumber: CheckpointNumber(1),
230
+ indexWithinCheckpoint: -1,
231
+ });
232
+
233
+ const checkpoint = new Checkpoint(
234
+ AppendOnlyTreeSnapshot.random(),
235
+ CheckpointHeader.random(),
236
+ [block1],
237
+ CheckpointNumber(1),
238
+ );
239
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
240
+
241
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(BlockIndexNotSequentialError);
242
+ await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
243
+ });
244
+
245
+ it('throws an error if checkpoint has invalid initial number', async () => {
246
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
247
+ checkpointNumber: CheckpointNumber(2),
248
+ indexWithinCheckpoint: 0,
249
+ });
250
+
251
+ const checkpoint = new Checkpoint(
252
+ AppendOnlyTreeSnapshot.random(),
253
+ CheckpointHeader.random(),
254
+ [block1],
255
+ CheckpointNumber(2),
256
+ );
257
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
258
+
259
+ await expect(store.addCheckpoints([publishedCheckpoint])).rejects.toThrow(
260
+ InitialCheckpointNumberNotSequentialError,
261
+ );
262
+ });
263
+
264
+ it('allows the correct initial checkpoint', async () => {
265
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
266
+ checkpointNumber: CheckpointNumber(1),
267
+ indexWithinCheckpoint: 0,
268
+ });
269
+
270
+ const checkpoint = new Checkpoint(
271
+ AppendOnlyTreeSnapshot.random(),
272
+ CheckpointHeader.random(),
273
+ [block1],
274
+ CheckpointNumber(1),
275
+ );
276
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
277
+
278
+ await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
279
+ });
280
+
281
+ it('throws on duplicate initial checkpoint', async () => {
282
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
283
+ checkpointNumber: CheckpointNumber(1),
284
+ indexWithinCheckpoint: 0,
285
+ });
286
+
287
+ const block2 = await L2BlockNew.random(BlockNumber(1), {
288
+ checkpointNumber: CheckpointNumber(1),
289
+ indexWithinCheckpoint: 0,
290
+ });
291
+
292
+ const checkpoint = new Checkpoint(
293
+ AppendOnlyTreeSnapshot.random(),
294
+ CheckpointHeader.random(),
295
+ [block1],
296
+ CheckpointNumber(1),
297
+ );
298
+ const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
299
+
300
+ const checkpoint2 = new Checkpoint(
301
+ AppendOnlyTreeSnapshot.random(),
302
+ CheckpointHeader.random(),
303
+ [block2],
304
+ CheckpointNumber(1),
305
+ );
306
+ const publishedCheckpoint2 = makePublishedCheckpoint(checkpoint2, 10);
307
+
308
+ await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
309
+ await expect(store.addCheckpoints([publishedCheckpoint2])).rejects.toThrow(
310
+ InitialCheckpointNumberNotSequentialError,
311
+ );
116
312
  });
117
313
  });
118
314
 
119
- describe('unwindBlocks', () => {
120
- it('unwinding blocks will remove blocks from the chain', async () => {
121
- await store.addBlocks(blocks);
122
- const blockNumber = await store.getSynchedL2BlockNumber();
315
+ describe('unwindcheckpoints', () => {
316
+ it('unwinding checkpoints will remove checkpoints from the chain', async () => {
317
+ await store.addCheckpoints(publishedCheckpoints);
318
+ const checkpointNumber = await store.getSynchedCheckpointNumber();
319
+ const lastCheckpoint = publishedCheckpoints.at(-1)!;
320
+ const lastBlockNumber = lastCheckpoint.checkpoint.blocks[0].number;
123
321
 
124
- expectBlocksEqual(await store.getPublishedBlocks(blockNumber, 1), [blocks[blocks.length - 1]]);
322
+ // Verify block exists before unwinding
323
+ const retrievedBlock = await store.getCheckpointedBlock(lastBlockNumber);
324
+ expect(retrievedBlock).toBeDefined();
325
+ expect(retrievedBlock!.block.header.equals(lastCheckpoint.checkpoint.blocks[0].header)).toBe(true);
326
+ expect(retrievedBlock!.checkpointNumber).toEqual(checkpointNumber);
125
327
 
126
- await store.unwindBlocks(blockNumber, 1);
328
+ await store.unwindCheckpoints(checkpointNumber, 1);
127
329
 
128
- expect(await store.getSynchedL2BlockNumber()).toBe(blockNumber - 1);
129
- expect(await store.getPublishedBlocks(blockNumber, 1)).toEqual([]);
330
+ expect(await store.getSynchedCheckpointNumber()).toBe(checkpointNumber - 1);
331
+ await expect(store.getCheckpointedBlock(lastBlockNumber)).resolves.toBeUndefined();
130
332
  });
131
333
 
132
334
  it('can unwind multiple empty blocks', async () => {
133
- const emptyBlocks = await timesParallel(10, async i => makePublished(await L2Block.random(i + 1, 0), i + 10));
134
- await store.addBlocks(emptyBlocks);
135
- expect(await store.getSynchedL2BlockNumber()).toBe(10);
335
+ // Create checkpoints sequentially to chain archive roots
336
+ const emptyCheckpoints: PublishedCheckpoint[] = [];
337
+ for (let i = 0; i < 10; i++) {
338
+ const previousArchive = i > 0 ? emptyCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
339
+ const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
340
+ numBlocks: 1,
341
+ startBlockNumber: i + 1,
342
+ txsPerBlock: 0,
343
+ previousArchive,
344
+ });
345
+ emptyCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
346
+ }
347
+ await store.addCheckpoints(emptyCheckpoints);
348
+ expect(await store.getSynchedCheckpointNumber()).toBe(10);
136
349
 
137
- await store.unwindBlocks(10, 3);
138
- expect(await store.getSynchedL2BlockNumber()).toBe(7);
139
- expect((await store.getPublishedBlocks(1, 10)).map(b => b.block.number)).toEqual([1, 2, 3, 4, 5, 6, 7]);
350
+ await store.unwindCheckpoints(CheckpointNumber(10), 3);
351
+ expect(await store.getSynchedCheckpointNumber()).toBe(7);
352
+ expect((await store.getRangeOfCheckpoints(CheckpointNumber(1), 10)).map(b => b.checkpointNumber)).toEqual([
353
+ 1, 2, 3, 4, 5, 6, 7,
354
+ ]);
140
355
  });
141
356
 
142
- it('refuses to unwind blocks if the tip is not the last block', async () => {
143
- await store.addBlocks(blocks);
144
- await expect(store.unwindBlocks(5, 1)).rejects.toThrow(/can only unwind blocks from the tip/i);
357
+ it('refuses to unwind checkpoints if the tip is not the last checkpoint', async () => {
358
+ await store.addCheckpoints(publishedCheckpoints);
359
+ await expect(store.unwindCheckpoints(CheckpointNumber(5), 1)).rejects.toThrow(
360
+ /can only unwind checkpoints from the tip/i,
361
+ );
145
362
  });
146
363
 
147
364
  it('unwound blocks and headers cannot be retrieved by hash or archive', async () => {
148
- await store.addBlocks(blocks);
149
- const lastBlock = blocks[blocks.length - 1];
150
- const blockHash = await lastBlock.block.hash();
151
- const archive = lastBlock.block.archive.root;
365
+ await store.addCheckpoints(publishedCheckpoints);
366
+ const lastCheckpoint = publishedCheckpoints[publishedCheckpoints.length - 1];
367
+ const lastBlock = lastCheckpoint.checkpoint.blocks[0];
368
+ const blockHash = await lastBlock.header.hash();
369
+ const archive = lastBlock.archive.root;
152
370
 
153
371
  // Verify block and header exist before unwinding
154
- expect(await store.getPublishedBlockByHash(blockHash)).toBeDefined();
155
- expect(await store.getPublishedBlockByArchive(archive)).toBeDefined();
156
- expect(await store.getBlockHeaderByHash(blockHash)).toBeDefined();
157
- expect(await store.getBlockHeaderByArchive(archive)).toBeDefined();
372
+ const retrievedByHash = await store.getCheckpointedBlockByHash(blockHash);
373
+ expect(retrievedByHash).toBeDefined();
374
+ expect(retrievedByHash!.block.header.equals(lastBlock.header)).toBe(true);
375
+
376
+ const retrievedByArchive = await store.getCheckpointedBlockByArchive(archive);
377
+ expect(retrievedByArchive).toBeDefined();
378
+ expect(retrievedByArchive!.block.header.equals(lastBlock.header)).toBe(true);
158
379
 
159
- // Unwind the block
160
- await store.unwindBlocks(lastBlock.block.number, 1);
380
+ const headerByHash = await store.getBlockHeaderByHash(blockHash);
381
+ expect(headerByHash).toBeDefined();
382
+ expect(headerByHash!.equals(lastBlock.header)).toBe(true);
383
+
384
+ const headerByArchive = await store.getBlockHeaderByArchive(archive);
385
+ expect(headerByArchive).toBeDefined();
386
+ expect(headerByArchive!.equals(lastBlock.header)).toBe(true);
387
+
388
+ // Unwind the checkpoint
389
+ await store.unwindCheckpoints(lastCheckpoint.checkpoint.number, 1);
161
390
 
162
391
  // Verify neither block nor header can be retrieved after unwinding
163
- expect(await store.getPublishedBlockByHash(blockHash)).toBeUndefined();
164
- expect(await store.getPublishedBlockByArchive(archive)).toBeUndefined();
392
+ expect(await store.getCheckpointedBlockByHash(blockHash)).toBeUndefined();
393
+ expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
165
394
  expect(await store.getBlockHeaderByHash(blockHash)).toBeUndefined();
166
395
  expect(await store.getBlockHeaderByArchive(archive)).toBeUndefined();
167
396
  });
168
397
  });
169
398
 
170
- describe('getBlocks', () => {
171
- beforeEach(async () => {
172
- await store.addBlocks(blocks);
399
+ describe('multi-block checkpoints', () => {
400
+ it('block number increases correctly when adding checkpoints with multiple blocks', async () => {
401
+ // Create 3 checkpoints: first with 2 blocks, second with 3 blocks, third with 1 block
402
+ // Total blocks: 6, spanning block numbers 1-6
403
+ // Chain archive roots across checkpoints
404
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
405
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
406
+
407
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
408
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
409
+ numBlocks: 3,
410
+ startBlockNumber: 3,
411
+ previousArchive: previousArchive1,
412
+ });
413
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
414
+
415
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
416
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
417
+ numBlocks: 1,
418
+ startBlockNumber: 6,
419
+ previousArchive: previousArchive2,
420
+ });
421
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
422
+
423
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
424
+
425
+ // Checkpoint number should be 3 (the last checkpoint number)
426
+ expect(await store.getSynchedCheckpointNumber()).toBe(3);
427
+ // Block number should be 6 (the last block number across all checkpoints)
428
+ expect(await store.getLatestBlockNumber()).toBe(6);
173
429
  });
174
430
 
175
- it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
176
- expectBlocksEqual(await store.getPublishedBlocks(start, limit), getExpectedBlocks());
431
+ it('block number decreases correctly when unwinding checkpoints with multiple blocks', async () => {
432
+ // Create 3 checkpoints with varying block counts, chaining archive roots
433
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
434
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
435
+
436
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
437
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
438
+ numBlocks: 3,
439
+ startBlockNumber: 3,
440
+ previousArchive: previousArchive1,
441
+ });
442
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
443
+
444
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
445
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
446
+ numBlocks: 2,
447
+ startBlockNumber: 6,
448
+ previousArchive: previousArchive2,
449
+ });
450
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
451
+
452
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
453
+
454
+ expect(await store.getSynchedCheckpointNumber()).toBe(3);
455
+ expect(await store.getLatestBlockNumber()).toBe(7);
456
+
457
+ // Unwind the last checkpoint (which has 2 blocks)
458
+ await store.unwindCheckpoints(CheckpointNumber(3), 1);
459
+
460
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
461
+ expect(await store.getLatestBlockNumber()).toBe(5);
462
+
463
+ // Unwind another checkpoint (which has 3 blocks)
464
+ await store.unwindCheckpoints(CheckpointNumber(2), 1);
465
+
466
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
467
+ expect(await store.getLatestBlockNumber()).toBe(2);
177
468
  });
178
469
 
179
- it('returns an empty array if no blocks are found', async () => {
180
- await expect(store.getPublishedBlocks(12, 1)).resolves.toEqual([]);
470
+ it('unwinding multiple checkpoints with multiple blocks in one go', async () => {
471
+ // Create 4 checkpoints with varying block counts, chaining archive roots
472
+ // Checkpoint 1: blocks 1-2 (2 blocks)
473
+ // Checkpoint 2: blocks 3-5 (3 blocks)
474
+ // Checkpoint 3: blocks 6-7 (2 blocks)
475
+ // Checkpoint 4: blocks 8-10 (3 blocks)
476
+ // Total: 10 blocks across 4 checkpoints
477
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
478
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
479
+
480
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
481
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
482
+ numBlocks: 3,
483
+ startBlockNumber: 3,
484
+ previousArchive: previousArchive1,
485
+ });
486
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
487
+
488
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
489
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
490
+ numBlocks: 2,
491
+ startBlockNumber: 6,
492
+ previousArchive: previousArchive2,
493
+ });
494
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
495
+
496
+ const previousArchive3 = checkpoint3Cp.blocks.at(-1)!.archive;
497
+ const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
498
+ numBlocks: 3,
499
+ startBlockNumber: 8,
500
+ previousArchive: previousArchive3,
501
+ });
502
+ const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
503
+
504
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3, checkpoint4]);
505
+
506
+ expect(await store.getSynchedCheckpointNumber()).toBe(4);
507
+ expect(await store.getLatestBlockNumber()).toBe(10);
508
+
509
+ // Unwind 2 checkpoints at once (checkpoints 3 and 4, which together have 5 blocks)
510
+ await store.unwindCheckpoints(CheckpointNumber(4), 2);
511
+
512
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
513
+ expect(await store.getLatestBlockNumber()).toBe(5);
514
+
515
+ // Verify blocks 1-5 still exist (from checkpoints 1 and 2)
516
+ for (let blockNumber = 1; blockNumber <= 5; blockNumber++) {
517
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
518
+ }
519
+
520
+ // Verify blocks 6-10 are gone (from checkpoints 3 and 4)
521
+ for (let blockNumber = 6; blockNumber <= 10; blockNumber++) {
522
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
523
+ }
524
+
525
+ // Unwind remaining 2 checkpoints at once (checkpoints 1 and 2, which together have 5 blocks)
526
+ await store.unwindCheckpoints(CheckpointNumber(2), 2);
527
+
528
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
529
+ expect(await store.getLatestBlockNumber()).toBe(0);
530
+
531
+ // Verify all blocks are gone
532
+ for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
533
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
534
+ }
181
535
  });
182
536
 
183
- it('throws an error if limit is invalid', async () => {
184
- await expect(store.getPublishedBlocks(1, 0)).rejects.toThrow('Invalid limit: 0');
537
+ it('getCheckpointedBlock returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
538
+ // Create checkpoints with chained archive roots
539
+ // Create a checkpoint with 3 blocks
540
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 });
541
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
542
+
543
+ // Create another checkpoint with 2 blocks
544
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
545
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
546
+ numBlocks: 2,
547
+ startBlockNumber: 4,
548
+ previousArchive: previousArchive1,
549
+ });
550
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
551
+
552
+ await store.addCheckpoints([checkpoint1, checkpoint2]);
553
+
554
+ // Check blocks from the first checkpoint (blocks 1, 2, 3)
555
+ for (let i = 0; i < 3; i++) {
556
+ const blockNumber = i + 1;
557
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
558
+
559
+ expect(retrievedBlock).toBeDefined();
560
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
561
+ expect(retrievedBlock!.block.number).toBe(blockNumber);
562
+ expect(retrievedBlock!.l1).toEqual(checkpoint1.l1);
563
+ expect(retrievedBlock!.attestations.every((a, j) => a.equals(checkpoint1.attestations[j]))).toBe(true);
564
+ }
565
+
566
+ // Check blocks from the second checkpoint (blocks 4, 5)
567
+ for (let i = 0; i < 2; i++) {
568
+ const blockNumber = i + 4;
569
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
570
+
571
+ expect(retrievedBlock).toBeDefined();
572
+ expect(retrievedBlock!.checkpointNumber).toBe(2);
573
+ expect(retrievedBlock!.block.number).toBe(blockNumber);
574
+ expect(retrievedBlock!.l1).toEqual(checkpoint2.l1);
575
+ expect(retrievedBlock!.attestations.every((a, j) => a.equals(checkpoint2.attestations[j]))).toBe(true);
576
+ }
185
577
  });
186
578
 
187
- it('throws an error if `from` it is out of range', async () => {
188
- await expect(store.getPublishedBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).rejects.toThrow('Invalid start: -99');
579
+ it('getCheckpointedBlockByHash returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
580
+ const checkpoint = makePublishedCheckpoint(
581
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
582
+ 10,
583
+ );
584
+
585
+ await store.addCheckpoints([checkpoint]);
586
+
587
+ // Check each block by its hash
588
+ for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) {
589
+ const block = checkpoint.checkpoint.blocks[i];
590
+ const blockHash = await block.header.hash();
591
+ const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
592
+
593
+ expect(retrievedBlock).toBeDefined();
594
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
595
+ expect(retrievedBlock!.block.number).toBe(i + 1);
596
+ expect(retrievedBlock!.l1).toEqual(checkpoint.l1);
597
+ }
189
598
  });
190
599
 
191
- it('throws an error if unexpected initial block number is found', async () => {
192
- await store.addBlocks([makePublished(await L2Block.random(21), 31)], { force: true });
193
- await expect(store.getPublishedBlocks(20, 1)).rejects.toThrow(`mismatch`);
600
+ it('getCheckpointedBlockByArchive returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
601
+ const checkpoint = makePublishedCheckpoint(
602
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
603
+ 10,
604
+ );
605
+
606
+ await store.addCheckpoints([checkpoint]);
607
+
608
+ // Check each block by its archive root
609
+ for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) {
610
+ const block = checkpoint.checkpoint.blocks[i];
611
+ const archive = block.archive.root;
612
+ const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
613
+
614
+ expect(retrievedBlock).toBeDefined();
615
+ expect(retrievedBlock!.checkpointNumber).toBe(1);
616
+ expect(retrievedBlock!.block.number).toBe(i + 1);
617
+ expect(retrievedBlock!.l1).toEqual(checkpoint.l1);
618
+ }
194
619
  });
195
620
 
196
- it('throws an error if a gap is found', async () => {
197
- await store.addBlocks(
198
- [makePublished(await L2Block.random(20), 30), makePublished(await L2Block.random(22), 32)],
199
- { force: true },
621
+ it('unwinding a multi-block checkpoint removes all its blocks', async () => {
622
+ const checkpoint = makePublishedCheckpoint(
623
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
624
+ 10,
200
625
  );
201
- await expect(store.getPublishedBlocks(20, 2)).rejects.toThrow(`mismatch`);
626
+
627
+ await store.addCheckpoints([checkpoint]);
628
+
629
+ // Verify all 3 blocks exist
630
+ for (let blockNumber = 1; blockNumber <= 3; blockNumber++) {
631
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
632
+ }
633
+
634
+ // Unwind the checkpoint
635
+ await store.unwindCheckpoints(CheckpointNumber(1), 1);
636
+
637
+ // Verify all 3 blocks are removed
638
+ for (let blockNumber = 1; blockNumber <= 3; blockNumber++) {
639
+ expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
640
+ }
641
+
642
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
643
+ expect(await store.getLatestBlockNumber()).toBe(0);
202
644
  });
203
645
  });
204
646
 
205
- describe('getPublishedBlockByHash', () => {
647
+ describe('uncheckpointed blocks', () => {
648
+ it('can add blocks independently before a checkpoint arrives', async () => {
649
+ // First, establish some checkpointed blocks (checkpoint 1 with blocks 1-3)
650
+ const checkpoint1 = makePublishedCheckpoint(
651
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
652
+ 10,
653
+ );
654
+ await store.addCheckpoints([checkpoint1]);
655
+
656
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
657
+ expect(await store.getLatestBlockNumber()).toBe(3);
658
+
659
+ // Now add blocks 4, 5, 6 independently (without a checkpoint) for upcoming checkpoint 2
660
+ // Chain archive roots from the last block of checkpoint 1
661
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
662
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
663
+ checkpointNumber: CheckpointNumber(2),
664
+ indexWithinCheckpoint: 0,
665
+ lastArchive: lastBlockArchive,
666
+ });
667
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
668
+ checkpointNumber: CheckpointNumber(2),
669
+ indexWithinCheckpoint: 1,
670
+ lastArchive: block4.archive,
671
+ });
672
+ const block6 = await L2BlockNew.random(BlockNumber(6), {
673
+ checkpointNumber: CheckpointNumber(2),
674
+ indexWithinCheckpoint: 2,
675
+ lastArchive: block5.archive,
676
+ });
677
+
678
+ await store.addBlocks([block4, block5, block6]);
679
+
680
+ // Checkpoint number should still be 1 (no new checkpoint added)
681
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
682
+ // But latest block number should be 6
683
+ expect(await store.getLatestBlockNumber()).toBe(6);
684
+ });
685
+
686
+ it('getBlock retrieves uncheckpointed blocks', async () => {
687
+ // First, establish some checkpointed blocks
688
+ const checkpoint1 = makePublishedCheckpoint(
689
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
690
+ 10,
691
+ );
692
+ await store.addCheckpoints([checkpoint1]);
693
+
694
+ // Add uncheckpointed blocks for upcoming checkpoint 2, chaining archive roots
695
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
696
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
697
+ checkpointNumber: CheckpointNumber(2),
698
+ indexWithinCheckpoint: 0,
699
+ lastArchive: lastBlockArchive,
700
+ });
701
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
702
+ checkpointNumber: CheckpointNumber(2),
703
+ indexWithinCheckpoint: 1,
704
+ lastArchive: block3.archive,
705
+ });
706
+ await store.addBlocks([block3, block4]);
707
+
708
+ // getBlock should work for both checkpointed and uncheckpointed blocks
709
+ expect((await store.getBlock(1))?.number).toBe(1);
710
+ expect((await store.getBlock(2))?.number).toBe(2);
711
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
712
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
713
+ expect(await store.getBlock(5)).toBeUndefined();
714
+
715
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
716
+ checkpointNumber: CheckpointNumber(2),
717
+ indexWithinCheckpoint: 2,
718
+ lastArchive: block4.archive,
719
+ });
720
+ await store.addBlocks([block5]);
721
+
722
+ // Verify the uncheckpointed blocks have correct data
723
+ const retrieved3 = await store.getBlock(3);
724
+ expect(retrieved3!.number).toBe(3);
725
+ expect(retrieved3!.equals(block3)).toBe(true);
726
+ const retrieved4 = await store.getBlock(4);
727
+ expect(retrieved4!.number).toBe(4);
728
+ expect(retrieved4!.equals(block4)).toBe(true);
729
+ const retrieved5 = await store.getBlock(5);
730
+ expect(retrieved5!.number).toBe(5);
731
+ expect(retrieved5!.equals(block5)).toBe(true);
732
+ });
733
+
734
+ it('getBlockByHash retrieves uncheckpointed blocks', async () => {
735
+ // Add uncheckpointed blocks (no checkpoints at all) for initial checkpoint 1, chaining archive roots
736
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
737
+ checkpointNumber: CheckpointNumber(1),
738
+ indexWithinCheckpoint: 0,
739
+ });
740
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
741
+ checkpointNumber: CheckpointNumber(1),
742
+ indexWithinCheckpoint: 1,
743
+ lastArchive: block1.archive,
744
+ });
745
+ await store.addBlocks([block1, block2]);
746
+
747
+ // getBlockByHash should work for uncheckpointed blocks
748
+ const hash1 = await block1.header.hash();
749
+ const hash2 = await block2.header.hash();
750
+
751
+ const retrieved1 = await store.getBlockByHash(hash1);
752
+ expect(retrieved1!.equals(block1)).toBe(true);
753
+
754
+ const retrieved2 = await store.getBlockByHash(hash2);
755
+ expect(retrieved2!.equals(block2)).toBe(true);
756
+ });
757
+
758
+ it('getBlockByArchive retrieves uncheckpointed blocks', async () => {
759
+ // Add uncheckpointed blocks for initial checkpoint 1, chaining archive roots
760
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
761
+ checkpointNumber: CheckpointNumber(1),
762
+ indexWithinCheckpoint: 0,
763
+ });
764
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
765
+ checkpointNumber: CheckpointNumber(1),
766
+ indexWithinCheckpoint: 1,
767
+ lastArchive: block1.archive,
768
+ });
769
+ await store.addBlocks([block1, block2]);
770
+
771
+ // getBlockByArchive should work for uncheckpointed blocks
772
+ const archive1 = block1.archive.root;
773
+ const archive2 = block2.archive.root;
774
+
775
+ const retrieved1 = await store.getBlockByArchive(archive1);
776
+ expect(retrieved1!.equals(block1)).toBe(true);
777
+
778
+ const retrieved2 = await store.getBlockByArchive(archive2);
779
+ expect(retrieved2!.equals(block2)).toBe(true);
780
+ });
781
+
782
+ it('getCheckpointedBlock returns undefined for uncheckpointed blocks', async () => {
783
+ // Add a checkpoint with blocks 1-2
784
+ const checkpoint1 = makePublishedCheckpoint(
785
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
786
+ 10,
787
+ );
788
+ await store.addCheckpoints([checkpoint1]);
789
+
790
+ // Add uncheckpointed blocks 3-4 for upcoming checkpoint 2, chaining archive roots
791
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
792
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
793
+ checkpointNumber: CheckpointNumber(2),
794
+ indexWithinCheckpoint: 0,
795
+ lastArchive: lastBlockArchive,
796
+ });
797
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
798
+ checkpointNumber: CheckpointNumber(2),
799
+ indexWithinCheckpoint: 1,
800
+ lastArchive: block3.archive,
801
+ });
802
+ await store.addBlocks([block3, block4]);
803
+
804
+ // getCheckpointedBlock should work for checkpointed blocks
805
+ expect((await store.getCheckpointedBlock(1))?.block.number).toBe(1);
806
+ expect((await store.getCheckpointedBlock(2))?.block.number).toBe(2);
807
+
808
+ // getCheckpointedBlock should return undefined for uncheckpointed blocks
809
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
810
+ expect(await store.getCheckpointedBlock(4)).toBeUndefined();
811
+
812
+ // But getBlock should work for all blocks
813
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
814
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
815
+ });
816
+
817
+ it('getCheckpointedBlockByHash returns undefined for uncheckpointed blocks', async () => {
818
+ // Add uncheckpointed blocks for initial checkpoint 1
819
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
820
+ checkpointNumber: CheckpointNumber(1),
821
+ indexWithinCheckpoint: 0,
822
+ });
823
+ await store.addBlocks([block1]);
824
+
825
+ const hash = await block1.header.hash();
826
+
827
+ // getCheckpointedBlockByHash should return undefined
828
+ expect(await store.getCheckpointedBlockByHash(hash)).toBeUndefined();
829
+
830
+ // But getBlockByHash should work
831
+ expect((await store.getBlockByHash(hash))?.equals(block1)).toBe(true);
832
+ });
833
+
834
+ it('getCheckpointedBlockByArchive returns undefined for uncheckpointed blocks', async () => {
835
+ // Add uncheckpointed blocks for initial checkpoint 1
836
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
837
+ checkpointNumber: CheckpointNumber(1),
838
+ indexWithinCheckpoint: 0,
839
+ });
840
+ await store.addBlocks([block1]);
841
+
842
+ const archive = block1.archive.root;
843
+
844
+ // getCheckpointedBlockByArchive should return undefined
845
+ expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
846
+
847
+ // But getBlockByArchive should work
848
+ expect((await store.getBlockByArchive(archive))?.equals(block1)).toBe(true);
849
+ });
850
+
851
+ it('checkpoint adopts previously added uncheckpointed blocks', async () => {
852
+ // Add blocks 1-3 without a checkpoint (for initial checkpoint 1), chaining archive roots
853
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
854
+ checkpointNumber: CheckpointNumber(1),
855
+ indexWithinCheckpoint: 0,
856
+ });
857
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
858
+ checkpointNumber: CheckpointNumber(1),
859
+ indexWithinCheckpoint: 1,
860
+ lastArchive: block1.archive,
861
+ });
862
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
863
+ checkpointNumber: CheckpointNumber(1),
864
+ indexWithinCheckpoint: 2,
865
+ lastArchive: block2.archive,
866
+ });
867
+ await store.addBlocks([block1, block2, block3]);
868
+
869
+ expect(await store.getSynchedCheckpointNumber()).toBe(0);
870
+ expect(await store.getLatestBlockNumber()).toBe(3);
871
+
872
+ // getCheckpointedBlock should return undefined for all
873
+ expect(await store.getCheckpointedBlock(1)).toBeUndefined();
874
+ expect(await store.getCheckpointedBlock(2)).toBeUndefined();
875
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
876
+
877
+ // Now add a checkpoint that covers blocks 1-3
878
+ const checkpoint1 = makePublishedCheckpoint(
879
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
880
+ 10,
881
+ );
882
+ await store.addCheckpoints([checkpoint1]);
883
+
884
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
885
+ expect(await store.getLatestBlockNumber()).toBe(3);
886
+
887
+ // Now getCheckpointedBlock should work for all blocks
888
+ const checkpointed1 = await store.getCheckpointedBlock(1);
889
+ expect(checkpointed1).toBeDefined();
890
+ expect(checkpointed1!.checkpointNumber).toBe(1);
891
+ expect(checkpointed1!.l1).toEqual(checkpoint1.l1);
892
+
893
+ const checkpointed2 = await store.getCheckpointedBlock(2);
894
+ expect(checkpointed2).toBeDefined();
895
+ expect(checkpointed2!.checkpointNumber).toBe(1);
896
+
897
+ const checkpointed3 = await store.getCheckpointedBlock(3);
898
+ expect(checkpointed3).toBeDefined();
899
+ expect(checkpointed3!.checkpointNumber).toBe(1);
900
+ });
901
+
902
+ it('can add more uncheckpointed blocks after a checkpoint and then checkpoint them', async () => {
903
+ // Start with checkpoint 1 covering blocks 1-2
904
+ const checkpoint1 = makePublishedCheckpoint(
905
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
906
+ 10,
907
+ );
908
+ await store.addCheckpoints([checkpoint1]);
909
+
910
+ // Add uncheckpointed blocks 3-5 for the upcoming checkpoint 2, chaining archive roots
911
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
912
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
913
+ checkpointNumber: CheckpointNumber(2),
914
+ indexWithinCheckpoint: 0,
915
+ lastArchive: lastBlockArchive,
916
+ });
917
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
918
+ checkpointNumber: CheckpointNumber(2),
919
+ indexWithinCheckpoint: 1,
920
+ lastArchive: block3.archive,
921
+ });
922
+ const block5 = await L2BlockNew.random(BlockNumber(5), {
923
+ checkpointNumber: CheckpointNumber(2),
924
+ indexWithinCheckpoint: 2,
925
+ lastArchive: block4.archive,
926
+ });
927
+ await store.addBlocks([block3, block4, block5]);
928
+
929
+ expect(await store.getSynchedCheckpointNumber()).toBe(1);
930
+ expect(await store.getLatestBlockNumber()).toBe(5);
931
+
932
+ // Blocks 3-5 are not checkpointed yet
933
+ expect(await store.getCheckpointedBlock(3)).toBeUndefined();
934
+ expect(await store.getCheckpointedBlock(4)).toBeUndefined();
935
+ expect(await store.getCheckpointedBlock(5)).toBeUndefined();
936
+
937
+ // Add checkpoint 2 covering blocks 3-5, chaining from checkpoint1
938
+ const checkpoint2 = makePublishedCheckpoint(
939
+ await Checkpoint.random(CheckpointNumber(2), {
940
+ numBlocks: 3,
941
+ startBlockNumber: 3,
942
+ previousArchive: lastBlockArchive,
943
+ }),
944
+ 11,
945
+ );
946
+ await store.addCheckpoints([checkpoint2]);
947
+
948
+ expect(await store.getSynchedCheckpointNumber()).toBe(2);
949
+ expect(await store.getLatestBlockNumber()).toBe(5);
950
+
951
+ // Now blocks 3-5 should be checkpointed with checkpoint 2's info
952
+ const checkpointed3 = await store.getCheckpointedBlock(3);
953
+ expect(checkpointed3).toBeDefined();
954
+ expect(checkpointed3!.checkpointNumber).toBe(2);
955
+ expect(checkpointed3!.l1).toEqual(checkpoint2.l1);
956
+
957
+ const checkpointed4 = await store.getCheckpointedBlock(4);
958
+ expect(checkpointed4).toBeDefined();
959
+ expect(checkpointed4!.checkpointNumber).toBe(2);
960
+
961
+ const checkpointed5 = await store.getCheckpointedBlock(5);
962
+ expect(checkpointed5).toBeDefined();
963
+ expect(checkpointed5!.checkpointNumber).toBe(2);
964
+ });
965
+
966
+ it('getBlocks retrieves both checkpointed and uncheckpointed blocks', async () => {
967
+ // Add checkpoint with blocks 1-2
968
+ const checkpoint1 = makePublishedCheckpoint(
969
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
970
+ 10,
971
+ );
972
+ await store.addCheckpoints([checkpoint1]);
973
+
974
+ // Add uncheckpointed blocks 3-4 for the upcoming checkpoint 2, chaining archive roots
975
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
976
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
977
+ checkpointNumber: CheckpointNumber(2),
978
+ indexWithinCheckpoint: 0,
979
+ lastArchive: lastBlockArchive,
980
+ });
981
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
982
+ checkpointNumber: CheckpointNumber(2),
983
+ indexWithinCheckpoint: 1,
984
+ lastArchive: block3.archive,
985
+ });
986
+ await store.addBlocks([block3, block4]);
987
+
988
+ // getBlocks should retrieve all blocks
989
+ const allBlocks = await store.getBlocks(1, 10);
990
+ expect(allBlocks.length).toBe(4);
991
+ expect(allBlocks.map(b => b.number)).toEqual([1, 2, 3, 4]);
992
+ });
993
+ });
994
+
995
+ describe('addBlocks validation', () => {
996
+ it('throws if blocks have different checkpoint numbers', async () => {
997
+ // First, establish checkpoint 1 with blocks 1-2
998
+ const checkpoint1 = makePublishedCheckpoint(
999
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1000
+ 10,
1001
+ );
1002
+ await store.addCheckpoints([checkpoint1]);
1003
+
1004
+ // Try to add blocks 3 and 4 with different checkpoint numbers
1005
+ // Chain archives correctly to test the checkpoint number validation
1006
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1007
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1008
+ checkpointNumber: CheckpointNumber(2),
1009
+ indexWithinCheckpoint: 0,
1010
+ lastArchive: lastBlockArchive,
1011
+ });
1012
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1013
+ checkpointNumber: CheckpointNumber(3),
1014
+ indexWithinCheckpoint: 1,
1015
+ lastArchive: block3.archive,
1016
+ });
1017
+
1018
+ await expect(store.addBlocks([block3, block4])).rejects.toThrow(CheckpointNumberNotConsistentError);
1019
+ });
1020
+
1021
+ it('throws if checkpoint number is not the current checkpoint', async () => {
1022
+ // First, establish checkpoint 1 with blocks 1-2
1023
+ const checkpoint1 = makePublishedCheckpoint(
1024
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1025
+ 10,
1026
+ );
1027
+ await store.addCheckpoints([checkpoint1]);
1028
+
1029
+ // Try to add blocks for checkpoint 3 (skipping checkpoint 2)
1030
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1031
+ checkpointNumber: CheckpointNumber(3),
1032
+ indexWithinCheckpoint: 0,
1033
+ });
1034
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1035
+ checkpointNumber: CheckpointNumber(3),
1036
+ indexWithinCheckpoint: 1,
1037
+ });
1038
+
1039
+ await expect(store.addBlocks([block3, block4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1040
+ });
1041
+
1042
+ it('allows blocks with the same checkpoint number for the current checkpoint', async () => {
1043
+ // First, establish checkpoint 1 with blocks 1-2
1044
+ const checkpoint1 = makePublishedCheckpoint(
1045
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1046
+ 10,
1047
+ );
1048
+ await store.addCheckpoints([checkpoint1]);
1049
+
1050
+ // Add blocks 3 and 4 with consistent checkpoint number (2), chaining archive roots
1051
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1052
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1053
+ checkpointNumber: CheckpointNumber(2),
1054
+ indexWithinCheckpoint: 0,
1055
+ lastArchive: lastBlockArchive,
1056
+ });
1057
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1058
+ checkpointNumber: CheckpointNumber(2),
1059
+ indexWithinCheckpoint: 1,
1060
+ lastArchive: block3.archive,
1061
+ });
1062
+
1063
+ await expect(store.addBlocks([block3, block4])).resolves.toBe(true);
1064
+
1065
+ // Verify blocks were added
1066
+ expect((await store.getBlock(3))?.equals(block3)).toBe(true);
1067
+ expect((await store.getBlock(4))?.equals(block4)).toBe(true);
1068
+ });
1069
+
1070
+ it('allows blocks for the initial checkpoint when store is empty', async () => {
1071
+ // Add blocks for the initial checkpoint (1), chaining archive roots
1072
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1073
+ checkpointNumber: CheckpointNumber(1),
1074
+ indexWithinCheckpoint: 0,
1075
+ });
1076
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
1077
+ checkpointNumber: CheckpointNumber(1),
1078
+ indexWithinCheckpoint: 1,
1079
+ lastArchive: block1.archive,
1080
+ });
1081
+
1082
+ await expect(store.addBlocks([block1, block2])).resolves.toBe(true);
1083
+
1084
+ // Verify blocks were added
1085
+ expect((await store.getBlock(1))?.equals(block1)).toBe(true);
1086
+ expect((await store.getBlock(2))?.equals(block2)).toBe(true);
1087
+ expect(await store.getLatestBlockNumber()).toBe(2);
1088
+ });
1089
+
1090
+ it('throws if initial block is duplicated across calls', async () => {
1091
+ // Add blocks for the initial checkpoint (1)
1092
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1093
+ checkpointNumber: CheckpointNumber(1),
1094
+ indexWithinCheckpoint: 0,
1095
+ });
1096
+ const block2 = await L2BlockNew.random(BlockNumber(1), {
1097
+ checkpointNumber: CheckpointNumber(1),
1098
+ indexWithinCheckpoint: 0,
1099
+ });
1100
+
1101
+ await expect(store.addBlocks([block1])).resolves.toBe(true);
1102
+ await expect(store.addBlocks([block2])).rejects.toThrow(InitialBlockNumberNotSequentialError);
1103
+ });
1104
+
1105
+ it('throws if first block has wrong checkpoint number when store is empty', async () => {
1106
+ // Try to add blocks for checkpoint 2 when store is empty (should start at 1)
1107
+ const block1 = await L2BlockNew.random(BlockNumber(1), {
1108
+ checkpointNumber: CheckpointNumber(2),
1109
+ indexWithinCheckpoint: 0,
1110
+ });
1111
+ const block2 = await L2BlockNew.random(BlockNumber(2), {
1112
+ checkpointNumber: CheckpointNumber(2),
1113
+ indexWithinCheckpoint: 1,
1114
+ });
1115
+
1116
+ await expect(store.addBlocks([block1, block2])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1117
+ });
1118
+
1119
+ it('allows adding more blocks to the same checkpoint in separate calls', async () => {
1120
+ // First, establish checkpoint 1 with blocks 1-2
1121
+ const checkpoint1 = makePublishedCheckpoint(
1122
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1123
+ 10,
1124
+ );
1125
+ await store.addCheckpoints([checkpoint1]);
1126
+
1127
+ // Add block 3 for checkpoint 2, chaining archive roots
1128
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1129
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1130
+ checkpointNumber: CheckpointNumber(2),
1131
+ indexWithinCheckpoint: 0,
1132
+ lastArchive: lastBlockArchive,
1133
+ });
1134
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1135
+
1136
+ // Add block 4 for the same checkpoint 2 in a separate call
1137
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1138
+ checkpointNumber: CheckpointNumber(2),
1139
+ indexWithinCheckpoint: 1,
1140
+ lastArchive: block3.archive,
1141
+ });
1142
+ await expect(store.addBlocks([block4])).resolves.toBe(true);
1143
+
1144
+ expect(await store.getLatestBlockNumber()).toBe(4);
1145
+ });
1146
+
1147
+ it('throws if adding blocks in separate calls with non-consecutive indexes', async () => {
1148
+ // First, establish checkpoint 1 with blocks 1-2
1149
+ const checkpoint1 = makePublishedCheckpoint(
1150
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1151
+ 10,
1152
+ );
1153
+ await store.addCheckpoints([checkpoint1]);
1154
+
1155
+ // Add block 3 for checkpoint 2, chaining archive roots
1156
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1157
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1158
+ checkpointNumber: CheckpointNumber(2),
1159
+ indexWithinCheckpoint: 0,
1160
+ lastArchive: lastBlockArchive,
1161
+ });
1162
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1163
+
1164
+ // Add block 4 for the same checkpoint 2 in a separate call but with a missing index
1165
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1166
+ checkpointNumber: CheckpointNumber(2),
1167
+ indexWithinCheckpoint: 2,
1168
+ lastArchive: block3.archive,
1169
+ });
1170
+ await expect(store.addBlocks([block4])).rejects.toThrow(BlockIndexNotSequentialError);
1171
+
1172
+ expect(await store.getLatestBlockNumber()).toBe(3);
1173
+ });
1174
+
1175
+ it('throws if second batch of blocks has different checkpoint number than first batch', async () => {
1176
+ // First, establish checkpoint 1 with blocks 1-2
1177
+ const checkpoint1 = makePublishedCheckpoint(
1178
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1179
+ 10,
1180
+ );
1181
+ await store.addCheckpoints([checkpoint1]);
1182
+
1183
+ // Add block 3 for checkpoint 2, chaining archive roots
1184
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1185
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1186
+ checkpointNumber: CheckpointNumber(2),
1187
+ indexWithinCheckpoint: 0,
1188
+ lastArchive: lastBlockArchive,
1189
+ });
1190
+ await store.addBlocks([block3]);
1191
+
1192
+ // Try to add block 4 for checkpoint 3 (should fail because current checkpoint is still 2)
1193
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1194
+ checkpointNumber: CheckpointNumber(3),
1195
+ indexWithinCheckpoint: 0,
1196
+ lastArchive: block3.archive,
1197
+ });
1198
+ await expect(store.addBlocks([block4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
1199
+ });
1200
+
1201
+ it('force option bypasses checkpoint number validation', async () => {
1202
+ // First, establish checkpoint 1 with blocks 1-2
1203
+ const checkpoint1 = makePublishedCheckpoint(
1204
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1205
+ 10,
1206
+ );
1207
+ await store.addCheckpoints([checkpoint1]);
1208
+
1209
+ // Add blocks with different checkpoint numbers using force option, chaining archive roots
1210
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1211
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1212
+ checkpointNumber: CheckpointNumber(2),
1213
+ indexWithinCheckpoint: 0,
1214
+ lastArchive: lastBlockArchive,
1215
+ });
1216
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1217
+ checkpointNumber: CheckpointNumber(5),
1218
+ indexWithinCheckpoint: 0,
1219
+ lastArchive: block3.archive,
1220
+ });
1221
+
1222
+ await expect(store.addBlocks([block3, block4], { force: true })).resolves.toBe(true);
1223
+ });
1224
+
1225
+ it('force option bypasses blockindex number validation', async () => {
1226
+ // First, establish checkpoint 1 with blocks 1-2
1227
+ const checkpoint1 = makePublishedCheckpoint(
1228
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1229
+ 10,
1230
+ );
1231
+ await store.addCheckpoints([checkpoint1]);
1232
+
1233
+ // Add blocks with different checkpoint numbers using force option, chaining archive roots
1234
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1235
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1236
+ checkpointNumber: CheckpointNumber(2),
1237
+ indexWithinCheckpoint: 0,
1238
+ lastArchive: lastBlockArchive,
1239
+ });
1240
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1241
+ checkpointNumber: CheckpointNumber(2),
1242
+ indexWithinCheckpoint: 2,
1243
+ lastArchive: block3.archive,
1244
+ });
1245
+
1246
+ await expect(store.addBlocks([block3, block4], { force: true })).resolves.toBe(true);
1247
+ });
1248
+
1249
+ it('throws if adding blocks with non-consecutive archives', async () => {
1250
+ // First, establish checkpoint 1 with blocks 1-2
1251
+ const checkpoint1 = makePublishedCheckpoint(
1252
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1253
+ 10,
1254
+ );
1255
+ await store.addCheckpoints([checkpoint1]);
1256
+
1257
+ // Add block 3 for checkpoint 2 with incorrect archive
1258
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1259
+ checkpointNumber: CheckpointNumber(2),
1260
+ indexWithinCheckpoint: 0,
1261
+ });
1262
+ await expect(store.addBlocks([block3])).rejects.toThrow(BlockArchiveNotConsistentError);
1263
+
1264
+ expect(await store.getLatestBlockNumber()).toBe(2);
1265
+ });
1266
+
1267
+ it('throws if adding blocks with non-consecutive archives across calls', async () => {
1268
+ // First, establish checkpoint 1 with blocks 1-2
1269
+ const checkpoint1 = makePublishedCheckpoint(
1270
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1271
+ 10,
1272
+ );
1273
+ await store.addCheckpoints([checkpoint1]);
1274
+
1275
+ // Add block 3 for checkpoint 2 with correct archive
1276
+ const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1)!.archive;
1277
+ const block3 = await L2BlockNew.random(BlockNumber(3), {
1278
+ checkpointNumber: CheckpointNumber(2),
1279
+ indexWithinCheckpoint: 0,
1280
+ lastArchive: lastBlockArchive,
1281
+ });
1282
+ await expect(store.addBlocks([block3])).resolves.toBe(true);
1283
+
1284
+ // Add block 4 with incorrect archive (should fail)
1285
+ const block4 = await L2BlockNew.random(BlockNumber(4), {
1286
+ checkpointNumber: CheckpointNumber(2),
1287
+ indexWithinCheckpoint: 1,
1288
+ lastArchive: AppendOnlyTreeSnapshot.random(),
1289
+ });
1290
+ await expect(store.addBlocks([block4])).rejects.toThrow(BlockArchiveNotConsistentError);
1291
+
1292
+ expect(await store.getLatestBlockNumber()).toBe(3);
1293
+ });
1294
+ });
1295
+
1296
+ describe('getBlocksForCheckpoint', () => {
1297
+ it('returns blocks for a single-block checkpoint', async () => {
1298
+ const checkpoint = makePublishedCheckpoint(
1299
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 }),
1300
+ 10,
1301
+ );
1302
+ await store.addCheckpoints([checkpoint]);
1303
+
1304
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1305
+ expect(blocks).toBeDefined();
1306
+ expect(blocks!.length).toBe(1);
1307
+ expect(blocks![0].number).toBe(1);
1308
+ });
1309
+
1310
+ it('returns all blocks for a multi-block checkpoint', async () => {
1311
+ const checkpoint = makePublishedCheckpoint(
1312
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 4, startBlockNumber: 1 }),
1313
+ 10,
1314
+ );
1315
+ await store.addCheckpoints([checkpoint]);
1316
+
1317
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1318
+ expect(blocks).toBeDefined();
1319
+ expect(blocks!.length).toBe(4);
1320
+ expect(blocks!.map(b => b.number)).toEqual([1, 2, 3, 4]);
1321
+ });
1322
+
1323
+ it('returns correct blocks for different checkpoints', async () => {
1324
+ // Create checkpoints with chained archive roots
1325
+ // Checkpoint 1: blocks 1-2
1326
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1327
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1328
+
1329
+ // Checkpoint 2: blocks 3-5
1330
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1331
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1332
+ numBlocks: 3,
1333
+ startBlockNumber: 3,
1334
+ previousArchive: previousArchive1,
1335
+ });
1336
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1337
+
1338
+ // Checkpoint 3: blocks 6-7
1339
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1340
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1341
+ numBlocks: 2,
1342
+ startBlockNumber: 6,
1343
+ previousArchive: previousArchive2,
1344
+ });
1345
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1346
+
1347
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1348
+
1349
+ const blocks1 = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1350
+ expect(blocks1).toBeDefined();
1351
+ expect(blocks1!.map(b => b.number)).toEqual([1, 2]);
1352
+
1353
+ const blocks2 = await store.getBlocksForCheckpoint(CheckpointNumber(2));
1354
+ expect(blocks2).toBeDefined();
1355
+ expect(blocks2!.map(b => b.number)).toEqual([3, 4, 5]);
1356
+
1357
+ const blocks3 = await store.getBlocksForCheckpoint(CheckpointNumber(3));
1358
+ expect(blocks3).toBeDefined();
1359
+ expect(blocks3!.map(b => b.number)).toEqual([6, 7]);
1360
+ });
1361
+
1362
+ it('returns undefined for non-existent checkpoint', async () => {
1363
+ const checkpoint = makePublishedCheckpoint(
1364
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1365
+ 10,
1366
+ );
1367
+ await store.addCheckpoints([checkpoint]);
1368
+
1369
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(5));
1370
+ expect(blocks).toBeUndefined();
1371
+ });
1372
+
1373
+ it('returns undefined when no checkpoints exist', async () => {
1374
+ const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
1375
+ expect(blocks).toBeUndefined();
1376
+ });
1377
+ });
1378
+
1379
+ describe('getRangeOfCheckpoints', () => {
1380
+ it('returns empty array when no checkpoints exist', async () => {
1381
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1382
+ expect(checkpoints).toEqual([]);
1383
+ });
1384
+
1385
+ it('returns single checkpoint', async () => {
1386
+ const checkpoint = makePublishedCheckpoint(
1387
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1388
+ 10,
1389
+ );
1390
+ await store.addCheckpoints([checkpoint]);
1391
+
1392
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1393
+ expect(checkpoints.length).toBe(1);
1394
+ expect(checkpoints[0].checkpointNumber).toBe(1);
1395
+ expect(checkpoints[0].startBlock).toBe(1);
1396
+ expect(checkpoints[0].numBlocks).toBe(2);
1397
+ });
1398
+
1399
+ it('returns multiple checkpoints in order', async () => {
1400
+ // Create checkpoints with chained archive roots
1401
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1402
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1403
+
1404
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1405
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1406
+ numBlocks: 3,
1407
+ startBlockNumber: 3,
1408
+ previousArchive: previousArchive1,
1409
+ });
1410
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1411
+
1412
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1413
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1414
+ numBlocks: 1,
1415
+ startBlockNumber: 6,
1416
+ previousArchive: previousArchive2,
1417
+ });
1418
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1419
+
1420
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1421
+
1422
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1423
+ expect(checkpoints.length).toBe(3);
1424
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2, 3]);
1425
+ expect(checkpoints.map(c => c.startBlock)).toEqual([1, 3, 6]);
1426
+ expect(checkpoints.map(c => c.numBlocks)).toEqual([2, 3, 1]);
1427
+ });
1428
+
1429
+ it('respects the from parameter', async () => {
1430
+ // Create checkpoints with chained archive roots
1431
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1432
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1433
+
1434
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1435
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1436
+ numBlocks: 2,
1437
+ startBlockNumber: 3,
1438
+ previousArchive: previousArchive1,
1439
+ });
1440
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1441
+
1442
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1443
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1444
+ numBlocks: 2,
1445
+ startBlockNumber: 5,
1446
+ previousArchive: previousArchive2,
1447
+ });
1448
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1449
+
1450
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1451
+
1452
+ // Start from checkpoint 2
1453
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(2), 10);
1454
+ expect(checkpoints.length).toBe(2);
1455
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([2, 3]);
1456
+ });
1457
+
1458
+ it('respects the limit parameter', async () => {
1459
+ // Create checkpoints with chained archive roots
1460
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 1, startBlockNumber: 1 });
1461
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1462
+
1463
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1464
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1465
+ numBlocks: 1,
1466
+ startBlockNumber: 2,
1467
+ previousArchive: previousArchive1,
1468
+ });
1469
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1470
+
1471
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1472
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1473
+ numBlocks: 1,
1474
+ startBlockNumber: 3,
1475
+ previousArchive: previousArchive2,
1476
+ });
1477
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1478
+
1479
+ const previousArchive3 = checkpoint3Cp.blocks.at(-1)!.archive;
1480
+ const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
1481
+ numBlocks: 1,
1482
+ startBlockNumber: 4,
1483
+ previousArchive: previousArchive3,
1484
+ });
1485
+ const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
1486
+
1487
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3, checkpoint4]);
1488
+
1489
+ // Only get 2 checkpoints
1490
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 2);
1491
+ expect(checkpoints.length).toBe(2);
1492
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2]);
1493
+ });
1494
+
1495
+ it('returns correct checkpoint data including L1 info', async () => {
1496
+ const checkpoint = makePublishedCheckpoint(
1497
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
1498
+ 42,
1499
+ );
1500
+ await store.addCheckpoints([checkpoint]);
1501
+
1502
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 1);
1503
+ expect(checkpoints.length).toBe(1);
1504
+
1505
+ const data = checkpoints[0];
1506
+ expect(data.checkpointNumber).toBe(1);
1507
+ expect(data.startBlock).toBe(1);
1508
+ expect(data.numBlocks).toBe(3);
1509
+ expect(data.l1.blockNumber).toBe(42n);
1510
+ expect(data.header.equals(checkpoint.checkpoint.header)).toBe(true);
1511
+ expect(data.archive.equals(checkpoint.checkpoint.archive)).toBe(true);
1512
+ });
1513
+
1514
+ it('returns empty array when from is beyond available checkpoints', async () => {
1515
+ const checkpoint = makePublishedCheckpoint(
1516
+ await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
1517
+ 10,
1518
+ );
1519
+ await store.addCheckpoints([checkpoint]);
1520
+
1521
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(5), 10);
1522
+ expect(checkpoints).toEqual([]);
1523
+ });
1524
+
1525
+ it('works correctly after unwinding checkpoints', async () => {
1526
+ // Create checkpoints with chained archive roots
1527
+ const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 });
1528
+ const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
1529
+
1530
+ const previousArchive1 = checkpoint1Cp.blocks.at(-1)!.archive;
1531
+ const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
1532
+ numBlocks: 2,
1533
+ startBlockNumber: 3,
1534
+ previousArchive: previousArchive1,
1535
+ });
1536
+ const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
1537
+
1538
+ const previousArchive2 = checkpoint2Cp.blocks.at(-1)!.archive;
1539
+ const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
1540
+ numBlocks: 2,
1541
+ startBlockNumber: 5,
1542
+ previousArchive: previousArchive2,
1543
+ });
1544
+ const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
1545
+
1546
+ await store.addCheckpoints([checkpoint1, checkpoint2, checkpoint3]);
1547
+
1548
+ // Unwind checkpoint 3
1549
+ await store.unwindCheckpoints(CheckpointNumber(3), 1);
1550
+
1551
+ const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
1552
+ expect(checkpoints.length).toBe(2);
1553
+ expect(checkpoints.map(c => c.checkpointNumber)).toEqual([1, 2]);
1554
+ });
1555
+ });
1556
+
1557
+ describe('getCheckpointedBlock', () => {
206
1558
  beforeEach(async () => {
207
- await store.addBlocks(blocks);
1559
+ await store.addCheckpoints(publishedCheckpoints);
1560
+ });
1561
+
1562
+ it.each(blockNumberTests)('retrieves previously stored block %i', async (blockNumber, getExpectedBlock) => {
1563
+ const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
1564
+ const expectedBlock = getExpectedBlock();
1565
+ const expectedCheckpoint = publishedCheckpoints[blockNumber - 1];
1566
+
1567
+ expect(retrievedBlock).toBeDefined();
1568
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
1569
+ });
1570
+
1571
+ it('returns undefined if block is not found', async () => {
1572
+ await expect(store.getCheckpointedBlock(12)).resolves.toBeUndefined();
1573
+ });
1574
+
1575
+ it('returns undefined for block number 0', async () => {
1576
+ await expect(store.getCheckpointedBlock(0)).resolves.toBeUndefined();
1577
+ });
1578
+ });
1579
+
1580
+ describe('getCheckpointedBlockByHash', () => {
1581
+ beforeEach(async () => {
1582
+ await store.addCheckpoints(publishedCheckpoints);
208
1583
  });
209
1584
 
210
1585
  it('retrieves a block by its hash', async () => {
211
- const expectedBlock = blocks[5];
212
- const blockHash = await expectedBlock.block.hash();
213
- const retrievedBlock = await store.getPublishedBlockByHash(blockHash);
1586
+ const expectedCheckpoint = publishedCheckpoints[5];
1587
+ const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
1588
+ const blockHash = await expectedBlock.header.hash();
1589
+ const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
214
1590
 
215
1591
  expect(retrievedBlock).toBeDefined();
216
- expectBlocksEqual([retrievedBlock!], [expectedBlock]);
1592
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
217
1593
  });
218
1594
 
219
1595
  it('returns undefined for non-existent block hash', async () => {
220
1596
  const nonExistentHash = Fr.random();
221
- await expect(store.getPublishedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
1597
+ await expect(store.getCheckpointedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
222
1598
  });
223
1599
  });
224
1600
 
225
- describe('getPublishedBlockByArchive', () => {
1601
+ describe('getCheckpointedBlockByArchive', () => {
226
1602
  beforeEach(async () => {
227
- await store.addBlocks(blocks);
1603
+ await store.addCheckpoints(publishedCheckpoints);
228
1604
  });
229
1605
 
230
1606
  it('retrieves a block by its archive root', async () => {
231
- const expectedBlock = blocks[3];
232
- const archive = expectedBlock.block.archive.root;
233
- const retrievedBlock = await store.getPublishedBlockByArchive(archive);
1607
+ const expectedCheckpoint = publishedCheckpoints[3];
1608
+ const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
1609
+ const archive = expectedBlock.archive.root;
1610
+ const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
234
1611
 
235
1612
  expect(retrievedBlock).toBeDefined();
236
- expectBlocksEqual([retrievedBlock!], [expectedBlock]);
1613
+ expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
237
1614
  });
238
1615
 
239
1616
  it('returns undefined for non-existent archive root', async () => {
240
1617
  const nonExistentArchive = Fr.random();
241
- await expect(store.getPublishedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined();
1618
+ await expect(store.getCheckpointedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined();
242
1619
  });
243
1620
  });
244
1621
 
245
1622
  describe('getBlockHeaderByHash', () => {
246
1623
  beforeEach(async () => {
247
- await store.addBlocks(blocks);
1624
+ await store.addCheckpoints(publishedCheckpoints);
248
1625
  });
249
1626
 
250
1627
  it('retrieves a block header by its hash', async () => {
251
- const expectedBlock = blocks[7];
252
- const blockHash = await expectedBlock.block.hash();
1628
+ const expectedBlock = publishedCheckpoints[7].checkpoint.blocks[0];
1629
+ const blockHash = await expectedBlock.header.hash();
253
1630
  const retrievedHeader = await store.getBlockHeaderByHash(blockHash);
254
1631
 
255
1632
  expect(retrievedHeader).toBeDefined();
256
- expect(retrievedHeader!.equals(expectedBlock.block.getBlockHeader())).toBe(true);
1633
+ expect(retrievedHeader!.equals(expectedBlock.header)).toBe(true);
257
1634
  });
258
1635
 
259
1636
  it('returns undefined for non-existent block hash', async () => {
@@ -264,16 +1641,16 @@ export function describeArchiverDataStore(
264
1641
 
265
1642
  describe('getBlockHeaderByArchive', () => {
266
1643
  beforeEach(async () => {
267
- await store.addBlocks(blocks);
1644
+ await store.addCheckpoints(publishedCheckpoints);
268
1645
  });
269
1646
 
270
1647
  it('retrieves a block header by its archive root', async () => {
271
- const expectedBlock = blocks[2];
272
- const archive = expectedBlock.block.archive.root;
1648
+ const expectedBlock = publishedCheckpoints[2].checkpoint.blocks[0];
1649
+ const archive = expectedBlock.archive.root;
273
1650
  const retrievedHeader = await store.getBlockHeaderByArchive(archive);
274
1651
 
275
1652
  expect(retrievedHeader).toBeDefined();
276
- expect(retrievedHeader!.equals(expectedBlock.block.getBlockHeader())).toBe(true);
1653
+ expect(retrievedHeader!.equals(expectedBlock.header)).toBe(true);
277
1654
  });
278
1655
 
279
1656
  it('returns undefined for non-existent archive root', async () => {
@@ -282,14 +1659,16 @@ export function describeArchiverDataStore(
282
1659
  });
283
1660
  });
284
1661
 
285
- describe('getSyncedL2BlockNumber', () => {
286
- it('returns the block number before INITIAL_L2_BLOCK_NUM if no blocks have been added', async () => {
287
- await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(INITIAL_L2_BLOCK_NUM - 1);
1662
+ describe('getSynchedCheckpointNumber', () => {
1663
+ it('returns the checkpoint number before INITIAL_CHECKPOINT_NUMBER if no checkpoints have been added', async () => {
1664
+ await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(INITIAL_CHECKPOINT_NUMBER - 1);
288
1665
  });
289
1666
 
290
- it("returns the most recently added block's number", async () => {
291
- await store.addBlocks(blocks);
292
- await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.block.number);
1667
+ it('returns the most recently added checkpoint number', async () => {
1668
+ await store.addCheckpoints(publishedCheckpoints);
1669
+ await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(
1670
+ publishedCheckpoints.at(-1)!.checkpoint.number,
1671
+ );
293
1672
  });
294
1673
  });
295
1674
 
@@ -302,7 +1681,7 @@ export function describeArchiverDataStore(
302
1681
  });
303
1682
 
304
1683
  it('returns the L1 block number in which the most recent L2 block was published', async () => {
305
- await store.addBlocks(blocks);
1684
+ await store.addCheckpoints(publishedCheckpoints);
306
1685
  await expect(store.getSynchPoint()).resolves.toEqual({
307
1686
  blocksSynchedTo: 19n,
308
1687
  messagesSynchedTo: undefined,
@@ -335,61 +1714,47 @@ export function describeArchiverDataStore(
335
1714
 
336
1715
  describe('addLogs', () => {
337
1716
  it('adds private & public logs', async () => {
338
- const block = blocks[0].block;
339
- await expect(store.addLogs([block])).resolves.toEqual(true);
1717
+ const checkpoint = publishedCheckpoints[0];
1718
+ await store.addCheckpoints([checkpoint]);
1719
+ await expect(store.addLogs(checkpoint.checkpoint.blocks)).resolves.toEqual(true);
340
1720
  });
341
1721
  });
342
1722
 
343
- describe('deleteLogs', () => {
344
- it('deletes private & public logs', async () => {
345
- const block = blocks[0].block;
346
- await store.addBlocks([blocks[0]]);
347
- await expect(store.addLogs([block])).resolves.toEqual(true);
348
-
349
- expect((await store.getPrivateLogs(1, 1)).length).toEqual(
350
- block.body.txEffects.map(txEffect => txEffect.privateLogs).flat().length,
351
- );
352
- expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(
353
- block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length,
354
- );
355
-
356
- // This one is a pain for memory as we would never want to just delete memory in the middle.
357
- await store.deleteLogs([block]);
1723
+ it('deleteLogs', async () => {
1724
+ const block = publishedCheckpoints[0].checkpoint.blocks[0];
1725
+ await store.addBlocks([block]);
1726
+ await expect(store.addLogs([block])).resolves.toEqual(true);
358
1727
 
359
- expect((await store.getPrivateLogs(1, 1)).length).toEqual(0);
360
- expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(0);
361
- });
362
- });
1728
+ expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(
1729
+ block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length,
1730
+ );
363
1731
 
364
- describe('getPrivateLogs', () => {
365
- it('gets added private logs', async () => {
366
- const block = blocks[0].block;
367
- await store.addBlocks([blocks[0]]);
368
- await store.addLogs([block]);
1732
+ // This one is a pain for memory as we would never want to just delete memory in the middle.
1733
+ await store.deleteLogs([block]);
369
1734
 
370
- const privateLogs = await store.getPrivateLogs(1, 1);
371
- expect(privateLogs).toEqual(block.body.txEffects.map(txEffect => txEffect.privateLogs).flat());
372
- });
1735
+ expect((await store.getPublicLogs({ fromBlock: BlockNumber(1) })).logs.length).toEqual(0);
373
1736
  });
374
1737
 
375
1738
  describe('getTxEffect', () => {
1739
+ const getBlock = (i: number) => publishedCheckpoints[i].checkpoint.blocks[0];
1740
+
376
1741
  beforeEach(async () => {
377
- await store.addLogs(blocks.map(b => b.block));
378
- await store.addBlocks(blocks);
1742
+ await store.addLogs(publishedCheckpoints.flatMap(x => x.checkpoint.blocks));
1743
+ await store.addCheckpoints(publishedCheckpoints);
379
1744
  });
380
1745
 
381
1746
  it.each([
382
- () => ({ data: blocks[0].block.body.txEffects[0], block: blocks[0].block, txIndexInBlock: 0 }),
383
- () => ({ data: blocks[9].block.body.txEffects[3], block: blocks[9].block, txIndexInBlock: 3 }),
384
- () => ({ data: blocks[3].block.body.txEffects[1], block: blocks[3].block, txIndexInBlock: 1 }),
385
- () => ({ data: blocks[5].block.body.txEffects[2], block: blocks[5].block, txIndexInBlock: 2 }),
386
- () => ({ data: blocks[1].block.body.txEffects[0], block: blocks[1].block, txIndexInBlock: 0 }),
1747
+ () => ({ data: getBlock(0).body.txEffects[0], block: getBlock(0), txIndexInBlock: 0 }),
1748
+ () => ({ data: getBlock(9).body.txEffects[3], block: getBlock(9), txIndexInBlock: 3 }),
1749
+ () => ({ data: getBlock(3).body.txEffects[1], block: getBlock(3), txIndexInBlock: 1 }),
1750
+ () => ({ data: getBlock(5).body.txEffects[2], block: getBlock(5), txIndexInBlock: 2 }),
1751
+ () => ({ data: getBlock(1).body.txEffects[0], block: getBlock(1), txIndexInBlock: 0 }),
387
1752
  ])('retrieves a previously stored transaction', async getExpectedTx => {
388
1753
  const { data, block, txIndexInBlock } = getExpectedTx();
389
1754
  const expectedTx: IndexedTxEffect = {
390
1755
  data,
391
1756
  l2BlockNumber: block.number,
392
- l2BlockHash: L2BlockHash.fromField(await block.hash()),
1757
+ l2BlockHash: L2BlockHash.fromField(await block.header.hash()),
393
1758
  txIndexInBlock,
394
1759
  };
395
1760
  const actualTx = await store.getTxEffect(data.txHash);
@@ -401,16 +1766,16 @@ export function describeArchiverDataStore(
401
1766
  });
402
1767
 
403
1768
  it.each([
404
- () => wrapInBlock(blocks[0].block.body.txEffects[0], blocks[0].block),
405
- () => wrapInBlock(blocks[9].block.body.txEffects[3], blocks[9].block),
406
- () => wrapInBlock(blocks[3].block.body.txEffects[1], blocks[3].block),
407
- () => wrapInBlock(blocks[5].block.body.txEffects[2], blocks[5].block),
408
- () => wrapInBlock(blocks[1].block.body.txEffects[0], blocks[1].block),
409
- ])('tries to retrieves a previously stored transaction after deleted', async getExpectedTx => {
410
- await store.unwindBlocks(blocks.length, blocks.length);
411
-
412
- const expectedTx = await getExpectedTx();
413
- const actualTx = await store.getTxEffect(expectedTx.data.txHash);
1769
+ () => getBlock(0).body.txEffects[0],
1770
+ () => getBlock(9).body.txEffects[3],
1771
+ () => getBlock(3).body.txEffects[1],
1772
+ () => getBlock(5).body.txEffects[2],
1773
+ () => getBlock(1).body.txEffects[0],
1774
+ ])('tries to retrieves a previously stored transaction after deleted', async getTxEffect => {
1775
+ await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
1776
+
1777
+ const txEffect = getTxEffect();
1778
+ const actualTx = await store.getTxEffect(txEffect.txHash);
414
1779
  expect(actualTx).toEqual(undefined);
415
1780
  });
416
1781
 
@@ -419,22 +1784,22 @@ export function describeArchiverDataStore(
419
1784
  });
420
1785
 
421
1786
  it('does not fail if the block is unwound while requesting a tx', async () => {
422
- const expectedTx = await wrapInBlock(blocks[1].block.body.txEffects[0], blocks[1].block);
1787
+ const txEffect = getBlock(1).body.txEffects[0];
423
1788
  let done = false;
424
1789
  void (async () => {
425
1790
  while (!done) {
426
- void store.getTxEffect(expectedTx.data.txHash);
1791
+ void store.getTxEffect(txEffect.txHash);
427
1792
  await sleep(1);
428
1793
  }
429
1794
  })();
430
- await store.unwindBlocks(blocks.length, blocks.length);
1795
+ await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
431
1796
  done = true;
432
- expect(await store.getTxEffect(expectedTx.data.txHash)).toEqual(undefined);
1797
+ expect(await store.getTxEffect(txEffect.txHash)).toEqual(undefined);
433
1798
  });
434
1799
  });
435
1800
 
436
1801
  describe('L1 to L2 Messages', () => {
437
- const initialL2BlockNumber = 13;
1802
+ const initialCheckpointNumber = CheckpointNumber(13);
438
1803
 
439
1804
  const checkMessages = async (msgs: InboxMessage[]) => {
440
1805
  expect(await store.getLastL1ToL2Message()).toEqual(msgs.at(-1));
@@ -442,43 +1807,50 @@ export function describeArchiverDataStore(
442
1807
  expect(await store.getTotalL1ToL2MessageCount()).toEqual(BigInt(msgs.length));
443
1808
  };
444
1809
 
445
- const makeInboxMessagesWithFullBlocks = (blockCount: number, opts: { initialL2BlockNumber?: number } = {}) =>
1810
+ const makeInboxMessagesWithFullBlocks = (
1811
+ blockCount: number,
1812
+ opts: { initialCheckpointNumber?: CheckpointNumber } = {},
1813
+ ) =>
446
1814
  makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, {
447
1815
  overrideFn: (msg, i) => {
448
- const l2BlockNumber =
449
- (opts.initialL2BlockNumber ?? initialL2BlockNumber) + Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1816
+ const checkpointNumber = CheckpointNumber(
1817
+ (opts.initialCheckpointNumber ?? initialCheckpointNumber) +
1818
+ Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
1819
+ );
450
1820
  const index =
451
- InboxLeaf.smallestIndexFromL2Block(l2BlockNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
452
- return { ...msg, l2BlockNumber, index };
1821
+ InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1822
+ return { ...msg, checkpointNumber, index };
453
1823
  },
454
1824
  });
455
1825
 
456
1826
  it('stores first message ever', async () => {
457
- const msg = makeInboxMessage(Buffer16.ZERO, { index: 0n, l2BlockNumber: 1 });
1827
+ const msg = makeInboxMessage(Buffer16.ZERO, { index: 0n, checkpointNumber: CheckpointNumber(1) });
458
1828
  await store.addL1ToL2Messages([msg]);
459
1829
 
460
1830
  await checkMessages([msg]);
461
- expect(await store.getL1ToL2Messages(1)).toEqual([msg.leaf]);
1831
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([msg.leaf]);
462
1832
  });
463
1833
 
464
1834
  it('stores single message', async () => {
465
- const msg = makeInboxMessage(Buffer16.ZERO, { l2BlockNumber: 2 });
1835
+ const msg = makeInboxMessage(Buffer16.ZERO, { checkpointNumber: CheckpointNumber(2) });
466
1836
  await store.addL1ToL2Messages([msg]);
467
1837
 
468
1838
  await checkMessages([msg]);
469
- expect(await store.getL1ToL2Messages(2)).toEqual([msg.leaf]);
1839
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toEqual([msg.leaf]);
470
1840
  });
471
1841
 
472
1842
  it('stores and returns messages across different blocks', async () => {
473
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1843
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
474
1844
  await store.addL1ToL2Messages(msgs);
475
1845
 
476
1846
  await checkMessages(msgs);
477
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 2)).toEqual([msgs[2]].map(m => m.leaf));
1847
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual(
1848
+ [msgs[2]].map(m => m.leaf),
1849
+ );
478
1850
  });
479
1851
 
480
1852
  it('stores the same messages again', async () => {
481
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1853
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
482
1854
  await store.addL1ToL2Messages(msgs);
483
1855
  await store.addL1ToL2Messages(msgs.slice(2));
484
1856
 
@@ -486,26 +1858,29 @@ export function describeArchiverDataStore(
486
1858
  });
487
1859
 
488
1860
  it('stores and returns messages across different blocks with gaps', async () => {
489
- const msgs1 = makeInboxMessages(3, { initialL2BlockNumber: 1 });
490
- const msgs2 = makeInboxMessages(3, { initialL2BlockNumber: 20, initialHash: msgs1.at(-1)!.rollingHash });
1861
+ const msgs1 = makeInboxMessages(3, { initialCheckpointNumber: CheckpointNumber(1) });
1862
+ const msgs2 = makeInboxMessages(3, {
1863
+ initialCheckpointNumber: CheckpointNumber(20),
1864
+ initialHash: msgs1.at(-1)!.rollingHash,
1865
+ });
491
1866
 
492
1867
  await store.addL1ToL2Messages(msgs1);
493
1868
  await store.addL1ToL2Messages(msgs2);
494
1869
 
495
1870
  await checkMessages([...msgs1, ...msgs2]);
496
1871
 
497
- expect(await store.getL1ToL2Messages(1)).toEqual([msgs1[0].leaf]);
498
- expect(await store.getL1ToL2Messages(4)).toEqual([]);
499
- expect(await store.getL1ToL2Messages(20)).toEqual([msgs2[0].leaf]);
500
- expect(await store.getL1ToL2Messages(24)).toEqual([]);
1872
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([msgs1[0].leaf]);
1873
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toEqual([]);
1874
+ expect(await store.getL1ToL2Messages(CheckpointNumber(20))).toEqual([msgs2[0].leaf]);
1875
+ expect(await store.getL1ToL2Messages(CheckpointNumber(24))).toEqual([]);
501
1876
  });
502
1877
 
503
1878
  it('stores and returns messages with block numbers larger than a byte', async () => {
504
- const msgs = makeInboxMessages(5, { initialL2BlockNumber: 1000 });
1879
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber: CheckpointNumber(1000) });
505
1880
  await store.addL1ToL2Messages(msgs);
506
1881
 
507
1882
  await checkMessages(msgs);
508
- expect(await store.getL1ToL2Messages(1002)).toEqual([msgs[2]].map(m => m.leaf));
1883
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1002))).toEqual([msgs[2]].map(m => m.leaf));
509
1884
  });
510
1885
 
511
1886
  it('stores and returns multiple messages per block', async () => {
@@ -513,7 +1888,7 @@ export function describeArchiverDataStore(
513
1888
  await store.addL1ToL2Messages(msgs);
514
1889
 
515
1890
  await checkMessages(msgs);
516
- const blockMessages = await store.getL1ToL2Messages(initialL2BlockNumber + 1);
1891
+ const blockMessages = await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 1));
517
1892
  expect(blockMessages).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
518
1893
  expect(blockMessages).toEqual(
519
1894
  msgs.slice(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2).map(m => m.leaf),
@@ -521,17 +1896,21 @@ export function describeArchiverDataStore(
521
1896
  });
522
1897
 
523
1898
  it('stores messages in multiple operations', async () => {
524
- const msgs = makeInboxMessages(20, { initialL2BlockNumber });
1899
+ const msgs = makeInboxMessages(20, { initialCheckpointNumber });
525
1900
  await store.addL1ToL2Messages(msgs.slice(0, 10));
526
1901
  await store.addL1ToL2Messages(msgs.slice(10, 20));
527
1902
 
528
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 2)).toEqual([msgs[2]].map(m => m.leaf));
529
- expect(await store.getL1ToL2Messages(initialL2BlockNumber + 12)).toEqual([msgs[12]].map(m => m.leaf));
1903
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual(
1904
+ [msgs[2]].map(m => m.leaf),
1905
+ );
1906
+ expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 12))).toEqual(
1907
+ [msgs[12]].map(m => m.leaf),
1908
+ );
530
1909
  await checkMessages(msgs);
531
1910
  });
532
1911
 
533
1912
  it('iterates over messages from start index', async () => {
534
- const msgs = makeInboxMessages(10, { initialL2BlockNumber });
1913
+ const msgs = makeInboxMessages(10, { initialCheckpointNumber });
535
1914
  await store.addL1ToL2Messages(msgs);
536
1915
 
537
1916
  const iterated = await toArray(store.iterateL1ToL2Messages({ start: msgs[3].index }));
@@ -539,8 +1918,9 @@ export function describeArchiverDataStore(
539
1918
  });
540
1919
 
541
1920
  it('iterates over messages in reverse', async () => {
542
- const msgs = makeInboxMessages(10, { initialL2BlockNumber });
1921
+ const msgs = makeInboxMessages(10, { initialCheckpointNumber });
543
1922
  await store.addL1ToL2Messages(msgs);
1923
+ initialCheckpointNumber;
544
1924
 
545
1925
  const iterated = await toArray(store.iterateL1ToL2Messages({ reverse: true, end: msgs[3].index }));
546
1926
  expect(iterated).toEqual(msgs.slice(0, 4).reverse());
@@ -552,8 +1932,8 @@ export function describeArchiverDataStore(
552
1932
  });
553
1933
 
554
1934
  it('throws if block number for the first message is out of order', async () => {
555
- const msgs = makeInboxMessages(4, { initialL2BlockNumber });
556
- msgs[2].l2BlockNumber = initialL2BlockNumber - 1;
1935
+ const msgs = makeInboxMessages(4, { initialCheckpointNumber });
1936
+ msgs[2].checkpointNumber = CheckpointNumber(initialCheckpointNumber - 1);
557
1937
  await store.addL1ToL2Messages(msgs.slice(0, 2));
558
1938
  await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
559
1939
  });
@@ -567,28 +1947,28 @@ export function describeArchiverDataStore(
567
1947
  it('throws if rolling hash for first message is not correct', async () => {
568
1948
  const msgs = makeInboxMessages(4);
569
1949
  msgs[2].rollingHash = Buffer16.random();
570
- await store.addL1ToL2Messages(msgs.slice(0, 2));
1950
+ await store.addL1ToL2Messages(msgs.slice(0, CheckpointNumber(2)));
571
1951
  await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
572
1952
  });
573
1953
 
574
1954
  it('throws if index is not in the correct range', async () => {
575
- const msgs = makeInboxMessages(5, { initialL2BlockNumber });
1955
+ const msgs = makeInboxMessages(5, { initialCheckpointNumber });
576
1956
  msgs.at(-1)!.index += 100n;
577
1957
  await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
578
1958
  });
579
1959
 
580
1960
  it('throws if first index in block has gaps', async () => {
581
- const msgs = makeInboxMessages(4, { initialL2BlockNumber });
1961
+ const msgs = makeInboxMessages(4, { initialCheckpointNumber });
582
1962
  msgs[2].index++;
583
1963
  await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
584
1964
  });
585
1965
 
586
1966
  it('throws if index does not follow previous one', async () => {
587
1967
  const msgs = makeInboxMessages(2, {
588
- initialL2BlockNumber,
1968
+ initialCheckpointNumber,
589
1969
  overrideFn: (msg, i) => ({
590
1970
  ...msg,
591
- l2BlockNumber: 2,
1971
+ checkpointNumber: CheckpointNumber(2),
592
1972
  index: BigInt(i + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2),
593
1973
  }),
594
1974
  });
@@ -597,28 +1977,28 @@ export function describeArchiverDataStore(
597
1977
  });
598
1978
 
599
1979
  it('removes messages up to the given block number', async () => {
600
- const msgs = makeInboxMessagesWithFullBlocks(4, { initialL2BlockNumber: 1 });
1980
+ const msgs = makeInboxMessagesWithFullBlocks(4, { initialCheckpointNumber: CheckpointNumber(1) });
601
1981
 
602
1982
  await store.addL1ToL2Messages(msgs);
603
1983
  await checkMessages(msgs);
604
1984
 
605
- expect(await store.getL1ToL2Messages(1)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
606
- expect(await store.getL1ToL2Messages(2)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
607
- expect(await store.getL1ToL2Messages(3)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
608
- expect(await store.getL1ToL2Messages(4)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1985
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1986
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1987
+ expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1988
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
609
1989
 
610
- await store.rollbackL1ToL2MessagesToL2Block(2);
1990
+ await store.rollbackL1ToL2MessagesToCheckpoint(CheckpointNumber(2));
611
1991
 
612
- expect(await store.getL1ToL2Messages(1)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
613
- expect(await store.getL1ToL2Messages(2)).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
614
- expect(await store.getL1ToL2Messages(3)).toHaveLength(0);
615
- expect(await store.getL1ToL2Messages(4)).toHaveLength(0);
1992
+ expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1993
+ expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
1994
+ expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(0);
1995
+ expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(0);
616
1996
 
617
1997
  await checkMessages(msgs.slice(0, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2));
618
1998
  });
619
1999
 
620
2000
  it('removes messages starting with the given index', async () => {
621
- const msgs = makeInboxMessagesWithFullBlocks(4, { initialL2BlockNumber: 1 });
2001
+ const msgs = makeInboxMessagesWithFullBlocks(4, { initialCheckpointNumber: CheckpointNumber(1) });
622
2002
  await store.addL1ToL2Messages(msgs);
623
2003
 
624
2004
  await store.removeL1ToL2Messages(msgs[13].index);
@@ -638,7 +2018,7 @@ export function describeArchiverDataStore(
638
2018
  originalContractClassId: classId,
639
2019
  });
640
2020
  contractInstance = { ...randomInstance, address: await AztecAddress.random() };
641
- await store.addContractInstances([contractInstance], blockNum);
2021
+ await store.addContractInstances([contractInstance], BlockNumber(blockNum));
642
2022
  });
643
2023
 
644
2024
  it('returns previously stored contract instances', async () => {
@@ -652,7 +2032,7 @@ export function describeArchiverDataStore(
652
2032
  });
653
2033
 
654
2034
  it('returns undefined if previously stored contract instances was deleted', async () => {
655
- await store.deleteContractInstances([contractInstance], blockNum);
2035
+ await store.deleteContractInstances([contractInstance], BlockNumber(blockNum));
656
2036
  await expect(store.getContractInstance(contractInstance.address, timestamp)).resolves.toBeUndefined();
657
2037
  });
658
2038
  });
@@ -671,7 +2051,7 @@ export function describeArchiverDataStore(
671
2051
  originalContractClassId: classId,
672
2052
  });
673
2053
  contractInstance = { ...randomInstance, address: await AztecAddress.random() };
674
- await store.addContractInstances([contractInstance], 1);
2054
+ await store.addContractInstances([contractInstance], BlockNumber(1));
675
2055
  await store.addContractInstanceUpdates(
676
2056
  [
677
2057
  {
@@ -713,7 +2093,7 @@ export function describeArchiverDataStore(
713
2093
  ...randomInstance,
714
2094
  address: await AztecAddress.random(),
715
2095
  };
716
- await store.addContractInstances([otherContractInstance], 1);
2096
+ await store.addContractInstances([otherContractInstance], BlockNumber(1));
717
2097
 
718
2098
  const fetchedInstance = await store.getContractInstance(otherContractInstance.address, timestampOfChange + 1n);
719
2099
  expect(fetchedInstance?.originalContractClassId).toEqual(otherClassId);
@@ -731,7 +2111,7 @@ export function describeArchiverDataStore(
731
2111
  ...randomInstance,
732
2112
  address: await AztecAddress.random(),
733
2113
  };
734
- await store.addContractInstances([otherContractInstance], 1);
2114
+ await store.addContractInstances([otherContractInstance], BlockNumber(1));
735
2115
  await store.addContractInstanceUpdates(
736
2116
  [
737
2117
  {
@@ -759,7 +2139,7 @@ export function describeArchiverDataStore(
759
2139
  await store.addContractClasses(
760
2140
  [contractClass],
761
2141
  [await computePublicBytecodeCommitment(contractClass.packedBytecode)],
762
- blockNum,
2142
+ BlockNumber(blockNum),
763
2143
  );
764
2144
  });
765
2145
 
@@ -768,7 +2148,7 @@ export function describeArchiverDataStore(
768
2148
  });
769
2149
 
770
2150
  it('returns undefined if the initial deployed contract class was deleted', async () => {
771
- await store.deleteContractClasses([contractClass], blockNum);
2151
+ await store.deleteContractClasses([contractClass], BlockNumber(blockNum));
772
2152
  await expect(store.getContractClass(contractClass.id)).resolves.toBeUndefined();
773
2153
  });
774
2154
 
@@ -776,9 +2156,9 @@ export function describeArchiverDataStore(
776
2156
  await store.addContractClasses(
777
2157
  [contractClass],
778
2158
  [await computePublicBytecodeCommitment(contractClass.packedBytecode)],
779
- blockNum + 1,
2159
+ BlockNumber(blockNum + 1),
780
2160
  );
781
- await store.deleteContractClasses([contractClass], blockNum + 1);
2161
+ await store.deleteContractClasses([contractClass], BlockNumber(blockNum + 1));
782
2162
  await expect(store.getContractClass(contractClass.id)).resolves.toMatchObject(contractClass);
783
2163
  });
784
2164
 
@@ -817,154 +2197,269 @@ export function describeArchiverDataStore(
817
2197
  });
818
2198
  });
819
2199
 
820
- describe('getLogsByTags', () => {
821
- const numBlocks = 3;
2200
+ describe('getPrivateLogsByTags', () => {
2201
+ const numBlocksForLogs = 3;
822
2202
  const numTxsPerBlock = 4;
823
2203
  const numPrivateLogsPerTx = 3;
824
- const numPublicLogsPerTx = 2;
825
2204
 
826
- let blocks: PublishedL2Block[];
2205
+ let logsCheckpoints: PublishedCheckpoint[];
827
2206
 
828
- const makeTag = (blockNumber: number, txIndex: number, logIndex: number, isPublic = false) =>
829
- blockNumber === 1 && txIndex === 0 && logIndex === 0
830
- ? Fr.ZERO // Shared tag
831
- : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1));
2207
+ const makePrivateLogTag = (blockNumber: number, txIndex: number, logIndex: number): SiloedTag =>
2208
+ new SiloedTag(
2209
+ blockNumber === 1 && txIndex === 0 && logIndex === 0
2210
+ ? Fr.ZERO // Shared tag
2211
+ : new Fr(blockNumber * 100 + txIndex * 10 + logIndex),
2212
+ );
832
2213
 
833
- const makePrivateLog = (tag: Fr) =>
2214
+ const makePrivateLog = (tag: SiloedTag) =>
834
2215
  PrivateLog.from({
835
- fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i => (!i ? tag : new Fr(tag.toNumber() + i))),
2216
+ fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i =>
2217
+ !i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)),
2218
+ ),
836
2219
  emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS,
837
2220
  });
838
2221
 
839
- const makePublicLog = (tag: Fr) =>
840
- PublicLog.from({
841
- contractAddress: AztecAddress.fromNumber(1),
842
- // Arbitrary length
843
- fields: new Array(10).fill(null).map((_, i) => (!i ? tag : new Fr(tag.toNumber() + i))),
844
- });
845
-
846
2222
  const mockPrivateLogs = (blockNumber: number, txIndex: number) => {
847
2223
  return times(numPrivateLogsPerTx, (logIndex: number) => {
848
- const tag = makeTag(blockNumber, txIndex, logIndex);
2224
+ const tag = makePrivateLogTag(blockNumber, txIndex, logIndex);
849
2225
  return makePrivateLog(tag);
850
2226
  });
851
2227
  };
852
2228
 
853
- const mockPublicLogs = (blockNumber: number, txIndex: number) => {
854
- return times(numPublicLogsPerTx, (logIndex: number) => {
855
- const tag = makeTag(blockNumber, txIndex, logIndex, /* isPublic */ true);
856
- return makePublicLog(tag);
2229
+ const mockCheckpointWithLogs = async (
2230
+ blockNumber: number,
2231
+ previousArchive?: AppendOnlyTreeSnapshot,
2232
+ ): Promise<PublishedCheckpoint> => {
2233
+ const block = await L2BlockNew.random(BlockNumber(blockNumber), {
2234
+ checkpointNumber: CheckpointNumber(blockNumber),
2235
+ indexWithinCheckpoint: 0,
2236
+ state: makeStateForBlock(blockNumber, numTxsPerBlock),
2237
+ ...(previousArchive ? { lastArchive: previousArchive } : {}),
857
2238
  });
858
- };
859
-
860
- const mockBlockWithLogs = async (blockNumber: number): Promise<PublishedL2Block> => {
861
- const block = await L2Block.random(blockNumber);
862
- block.header.globalVariables.blockNumber = blockNumber;
2239
+ block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
863
2240
 
864
2241
  block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => {
865
2242
  const txEffect = await TxEffect.random();
866
2243
  txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
867
- txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
2244
+ txEffect.publicLogs = []; // No public logs needed for private log tests
868
2245
  return txEffect;
869
2246
  });
870
2247
 
871
- return PublishedL2Block.fromFields({
872
- block: block,
873
- attestations: times(3, CommitteeAttestation.random),
874
- l1: {
875
- blockNumber: BigInt(blockNumber),
876
- blockHash: makeBlockHash(blockNumber),
877
- timestamp: BigInt(blockNumber),
878
- },
879
- });
2248
+ const checkpoint = new Checkpoint(
2249
+ AppendOnlyTreeSnapshot.random(),
2250
+ CheckpointHeader.random(),
2251
+ [block],
2252
+ CheckpointNumber(blockNumber),
2253
+ );
2254
+ return makePublishedCheckpoint(checkpoint, blockNumber);
880
2255
  };
881
2256
 
882
2257
  beforeEach(async () => {
883
- blocks = await timesParallel(numBlocks, (index: number) => mockBlockWithLogs(index + 1));
2258
+ // Create checkpoints sequentially to chain archive roots
2259
+ logsCheckpoints = [];
2260
+ for (let i = 0; i < numBlocksForLogs; i++) {
2261
+ const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
2262
+ logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
2263
+ }
884
2264
 
885
- await store.addBlocks(blocks);
886
- await store.addLogs(blocks.map(b => b.block));
2265
+ await store.addCheckpoints(logsCheckpoints);
2266
+ await store.addLogs(logsCheckpoints.flatMap(p => p.checkpoint.blocks));
887
2267
  });
888
2268
 
889
2269
  it('is possible to batch request private logs via tags', async () => {
890
- const tags = [makeTag(2, 1, 2), makeTag(1, 2, 0)];
2270
+ const tags = [makePrivateLogTag(2, 1, 2), makePrivateLogTag(1, 2, 0)];
891
2271
 
892
- const logsByTags = await store.getLogsByTags(tags);
2272
+ const logsByTags = await store.getPrivateLogsByTags(tags);
893
2273
 
894
2274
  expect(logsByTags).toEqual([
895
2275
  [
896
2276
  expect.objectContaining({
897
2277
  blockNumber: 2,
898
- log: makePrivateLog(tags[0]),
899
- isFromPublic: false,
2278
+ logData: makePrivateLog(tags[0]).getEmittedFields(),
900
2279
  }),
901
2280
  ],
902
2281
  [
903
2282
  expect.objectContaining({
904
2283
  blockNumber: 1,
905
- log: makePrivateLog(tags[1]),
906
- isFromPublic: false,
2284
+ logData: makePrivateLog(tags[1]).getEmittedFields(),
907
2285
  }),
908
2286
  ],
909
2287
  ]);
910
2288
  });
911
2289
 
912
- it('is possible to batch request all logs (private and public) via tags', async () => {
913
- // Tag(1, 0, 0) is shared with the first private log and the first public log.
914
- const tags = [makeTag(1, 0, 0)];
2290
+ it('is possible to batch request logs that have the same tag but different content', async () => {
2291
+ const tags = [makePrivateLogTag(1, 2, 1)];
915
2292
 
916
- const logsByTags = await store.getLogsByTags(tags);
2293
+ // Create a checkpoint containing logs that have the same tag as the checkpoints before.
2294
+ // Chain from the last checkpoint's archive
2295
+ const newBlockNumber = numBlocksForLogs + 1;
2296
+ const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
2297
+ const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
2298
+ const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1];
2299
+ newLog.fields[0] = tags[0].value;
2300
+ newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1] = newLog;
2301
+ await store.addCheckpoints([newCheckpoint]);
2302
+ await store.addLogs([newCheckpoint.checkpoint.blocks[0]]);
2303
+
2304
+ const logsByTags = await store.getPrivateLogsByTags(tags);
917
2305
 
918
2306
  expect(logsByTags).toEqual([
919
2307
  [
920
2308
  expect.objectContaining({
921
2309
  blockNumber: 1,
922
- log: makePrivateLog(tags[0]),
923
- isFromPublic: false,
2310
+ logData: makePrivateLog(tags[0]).getEmittedFields(),
2311
+ }),
2312
+ expect.objectContaining({
2313
+ blockNumber: newBlockNumber,
2314
+ logData: newLog.getEmittedFields(),
924
2315
  }),
2316
+ ],
2317
+ ]);
2318
+ });
2319
+
2320
+ it('is possible to request logs for non-existing tags and determine their position', async () => {
2321
+ const tags = [makePrivateLogTag(99, 88, 77), makePrivateLogTag(1, 1, 1)];
2322
+
2323
+ const logsByTags = await store.getPrivateLogsByTags(tags);
2324
+
2325
+ expect(logsByTags).toEqual([
2326
+ [
2327
+ // No logs for the first tag.
2328
+ ],
2329
+ [
925
2330
  expect.objectContaining({
926
2331
  blockNumber: 1,
927
- log: makePublicLog(tags[0]),
928
- isFromPublic: true,
2332
+ logData: makePrivateLog(tags[1]).getEmittedFields(),
2333
+ }),
2334
+ ],
2335
+ ]);
2336
+ });
2337
+ });
2338
+
2339
+ describe('getPublicLogsByTagsFromContract', () => {
2340
+ const numBlocksForLogs = 3;
2341
+ const numTxsPerBlock = 4;
2342
+ const numPublicLogsPerTx = 2;
2343
+ const contractAddress = AztecAddress.fromNumber(543254);
2344
+
2345
+ let logsCheckpoints: PublishedCheckpoint[];
2346
+
2347
+ const makePublicLogTag = (blockNumber: number, txIndex: number, logIndex: number): Tag =>
2348
+ new Tag(
2349
+ blockNumber === 1 && txIndex === 0 && logIndex === 0
2350
+ ? Fr.ZERO // Shared tag
2351
+ : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123),
2352
+ );
2353
+
2354
+ const makePublicLog = (tag: Tag) =>
2355
+ PublicLog.from({
2356
+ contractAddress: contractAddress,
2357
+ // Arbitrary length
2358
+ fields: new Array(10).fill(null).map((_, i) => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))),
2359
+ });
2360
+
2361
+ const mockPublicLogs = (blockNumber: number, txIndex: number) => {
2362
+ return times(numPublicLogsPerTx, (logIndex: number) => {
2363
+ const tag = makePublicLogTag(blockNumber, txIndex, logIndex);
2364
+ return makePublicLog(tag);
2365
+ });
2366
+ };
2367
+
2368
+ const mockCheckpointWithLogs = async (
2369
+ blockNumber: number,
2370
+ previousArchive?: AppendOnlyTreeSnapshot,
2371
+ ): Promise<PublishedCheckpoint> => {
2372
+ const block = await L2BlockNew.random(BlockNumber(blockNumber), {
2373
+ checkpointNumber: CheckpointNumber(blockNumber),
2374
+ indexWithinCheckpoint: 0,
2375
+ state: makeStateForBlock(blockNumber, numTxsPerBlock),
2376
+ ...(previousArchive ? { lastArchive: previousArchive } : {}),
2377
+ });
2378
+ block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
2379
+
2380
+ block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => {
2381
+ const txEffect = await TxEffect.random();
2382
+ txEffect.privateLogs = []; // No private logs needed for public log tests
2383
+ txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
2384
+ return txEffect;
2385
+ });
2386
+
2387
+ const checkpoint = new Checkpoint(
2388
+ AppendOnlyTreeSnapshot.random(),
2389
+ CheckpointHeader.random(),
2390
+ [block],
2391
+ CheckpointNumber(blockNumber),
2392
+ );
2393
+ return makePublishedCheckpoint(checkpoint, blockNumber);
2394
+ };
2395
+
2396
+ beforeEach(async () => {
2397
+ // Create checkpoints sequentially to chain archive roots
2398
+ logsCheckpoints = [];
2399
+ for (let i = 0; i < numBlocksForLogs; i++) {
2400
+ const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
2401
+ logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
2402
+ }
2403
+
2404
+ await store.addCheckpoints(logsCheckpoints);
2405
+ await store.addLogs(logsCheckpoints.flatMap(p => p.checkpoint.blocks));
2406
+ });
2407
+
2408
+ it('is possible to batch request public logs via tags', async () => {
2409
+ const tags = [makePublicLogTag(2, 1, 1), makePublicLogTag(1, 2, 0)];
2410
+
2411
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
2412
+
2413
+ expect(logsByTags).toEqual([
2414
+ [
2415
+ expect.objectContaining({
2416
+ blockNumber: 2,
2417
+ logData: makePublicLog(tags[0]).getEmittedFields(),
2418
+ }),
2419
+ ],
2420
+ [
2421
+ expect.objectContaining({
2422
+ blockNumber: 1,
2423
+ logData: makePublicLog(tags[1]).getEmittedFields(),
929
2424
  }),
930
2425
  ],
931
2426
  ]);
932
2427
  });
933
2428
 
934
2429
  it('is possible to batch request logs that have the same tag but different content', async () => {
935
- const tags = [makeTag(1, 2, 1)];
2430
+ const tags = [makePublicLogTag(1, 2, 1)];
936
2431
 
937
- // Create a block containing logs that have the same tag as the blocks before.
938
- const newBlockNumber = numBlocks;
939
- const newBlock = await mockBlockWithLogs(newBlockNumber);
940
- const newLog = newBlock.block.body.txEffects[1].privateLogs[1];
941
- newLog.fields[0] = tags[0];
942
- newBlock.block.body.txEffects[1].privateLogs[1] = newLog;
943
- await store.addBlocks([newBlock]);
944
- await store.addLogs([newBlock.block]);
2432
+ // Create a checkpoint containing logs that have the same tag as the checkpoints before.
2433
+ // Chain from the last checkpoint's archive
2434
+ const newBlockNumber = numBlocksForLogs + 1;
2435
+ const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
2436
+ const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
2437
+ const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1];
2438
+ newLog.fields[0] = tags[0].value;
2439
+ newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1] = newLog;
2440
+ await store.addCheckpoints([newCheckpoint]);
2441
+ await store.addLogs([newCheckpoint.checkpoint.blocks[0]]);
945
2442
 
946
- const logsByTags = await store.getLogsByTags(tags);
2443
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
947
2444
 
948
2445
  expect(logsByTags).toEqual([
949
2446
  [
950
2447
  expect.objectContaining({
951
2448
  blockNumber: 1,
952
- log: makePrivateLog(tags[0]),
953
- isFromPublic: false,
2449
+ logData: makePublicLog(tags[0]).getEmittedFields(),
954
2450
  }),
955
2451
  expect.objectContaining({
956
2452
  blockNumber: newBlockNumber,
957
- log: newLog,
958
- isFromPublic: false,
2453
+ logData: newLog.getEmittedFields(),
959
2454
  }),
960
2455
  ],
961
2456
  ]);
962
2457
  });
963
2458
 
964
2459
  it('is possible to request logs for non-existing tags and determine their position', async () => {
965
- const tags = [makeTag(99, 88, 77), makeTag(1, 1, 1)];
2460
+ const tags = [makePublicLogTag(99, 88, 77), makePublicLogTag(1, 1, 0)];
966
2461
 
967
- const logsByTags = await store.getLogsByTags(tags);
2462
+ const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
968
2463
 
969
2464
  expect(logsByTags).toEqual([
970
2465
  [
@@ -973,8 +2468,7 @@ export function describeArchiverDataStore(
973
2468
  [
974
2469
  expect.objectContaining({
975
2470
  blockNumber: 1,
976
- log: makePrivateLog(tags[1]),
977
- isFromPublic: false,
2471
+ logData: makePublicLog(tags[1]).getEmittedFields(),
978
2472
  }),
979
2473
  ],
980
2474
  ]);
@@ -982,34 +2476,33 @@ export function describeArchiverDataStore(
982
2476
  });
983
2477
 
984
2478
  describe('getPublicLogs', () => {
985
- const txsPerBlock = 4;
986
- const numPublicFunctionCalls = 3;
987
- const numPublicLogs = 2;
988
- const numBlocks = 10;
989
- let blocks: PublishedL2Block[];
2479
+ const numBlocksForPublicLogs = 10;
990
2480
 
991
- beforeEach(async () => {
992
- blocks = await timesParallel(numBlocks, async (index: number) =>
993
- PublishedL2Block.fromFields({
994
- block: await L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numPublicLogs),
995
- l1: { blockNumber: BigInt(index), blockHash: makeBlockHash(index), timestamp: BigInt(index) },
996
- attestations: times(3, CommitteeAttestation.random),
997
- }),
998
- );
2481
+ // Helper to get total public logs per tx from a block
2482
+ const getPublicLogsPerTx = (block: L2BlockNew, txIndex: number) =>
2483
+ block.body.txEffects[txIndex].publicLogs.length;
999
2484
 
1000
- await store.addBlocks(blocks);
1001
- await store.addLogs(blocks.map(b => b.block));
2485
+ // Helper to get number of txs in a block
2486
+ const getTxsPerBlock = (block: L2BlockNew) => block.body.txEffects.length;
2487
+
2488
+ beforeEach(async () => {
2489
+ // Use the outer publishedCheckpoints for log tests
2490
+ for (let i = 0; i < numBlocksForPublicLogs; i++) {
2491
+ await store.addCheckpoints([publishedCheckpoints[i]]);
2492
+ await store.addLogs(publishedCheckpoints[i].checkpoint.blocks);
2493
+ }
1002
2494
  });
1003
2495
 
1004
2496
  it('no logs returned if deleted ("txHash" filter param is respected variant)', async () => {
1005
2497
  // get random tx
1006
- const targetBlockIndex = randomInt(numBlocks);
1007
- const targetTxIndex = randomInt(txsPerBlock);
1008
- const targetTxHash = blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].txHash;
2498
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
2499
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
2500
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
2501
+ const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
1009
2502
 
1010
2503
  await Promise.all([
1011
- store.unwindBlocks(blocks.length, blocks.length),
1012
- store.deleteLogs(blocks.map(b => b.block)),
2504
+ store.unwindCheckpoints(CheckpointNumber(numBlocksForPublicLogs), numBlocksForPublicLogs),
2505
+ store.deleteLogs(publishedCheckpoints.slice(0, numBlocksForPublicLogs).flatMap(b => b.checkpoint.blocks)),
1013
2506
  ]);
1014
2507
 
1015
2508
  const response = await store.getPublicLogs({ txHash: targetTxHash });
@@ -1021,16 +2514,17 @@ export function describeArchiverDataStore(
1021
2514
 
1022
2515
  it('"txHash" filter param is respected', async () => {
1023
2516
  // get random tx
1024
- const targetBlockIndex = randomInt(numBlocks);
1025
- const targetTxIndex = randomInt(txsPerBlock);
1026
- const targetTxHash = blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].txHash;
2517
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
2518
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
2519
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
2520
+ const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
1027
2521
 
1028
2522
  const response = await store.getPublicLogs({ txHash: targetTxHash });
1029
2523
  const logs = response.logs;
1030
2524
 
1031
2525
  expect(response.maxLogsHit).toBeFalsy();
1032
2526
 
1033
- const expectedNumLogs = numPublicFunctionCalls * numPublicLogs;
2527
+ const expectedNumLogs = getPublicLogsPerTx(targetBlock, targetTxIndex);
1034
2528
  expect(logs.length).toEqual(expectedNumLogs);
1035
2529
 
1036
2530
  const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM;
@@ -1040,6 +2534,17 @@ export function describeArchiverDataStore(
1040
2534
  }
1041
2535
  });
1042
2536
 
2537
+ it('returns block hash on public log ids', async () => {
2538
+ const targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
2539
+ const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
2540
+
2541
+ const logs = (await store.getPublicLogs({ fromBlock: targetBlock.number, toBlock: targetBlock.number + 1 }))
2542
+ .logs;
2543
+
2544
+ expect(logs.length).toBeGreaterThan(0);
2545
+ expect(logs.every(log => log.id.blockHash.equals(expectedBlockHash))).toBe(true);
2546
+ });
2547
+
1043
2548
  it('"fromBlock" and "toBlock" filter params are respected', async () => {
1044
2549
  // Set "fromBlock" and "toBlock"
1045
2550
  const fromBlock = 3;
@@ -1050,7 +2555,12 @@ export function describeArchiverDataStore(
1050
2555
 
1051
2556
  expect(response.maxLogsHit).toBeFalsy();
1052
2557
 
1053
- const expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numPublicLogs * (toBlock - fromBlock);
2558
+ // Compute expected logs from the blocks in range
2559
+ let expectedNumLogs = 0;
2560
+ for (let i = fromBlock - 1; i < toBlock - 1; i++) {
2561
+ const block = publishedCheckpoints[i].checkpoint.blocks[0];
2562
+ expectedNumLogs += block.body.txEffects.reduce((sum, tx) => sum + tx.publicLogs.length, 0);
2563
+ }
1054
2564
  expect(logs.length).toEqual(expectedNumLogs);
1055
2565
 
1056
2566
  for (const log of logs) {
@@ -1062,11 +2572,12 @@ export function describeArchiverDataStore(
1062
2572
 
1063
2573
  it('"contractAddress" filter param is respected', async () => {
1064
2574
  // Get a random contract address from the logs
1065
- const targetBlockIndex = randomInt(numBlocks);
1066
- const targetTxIndex = randomInt(txsPerBlock);
1067
- const targetLogIndex = randomInt(numPublicLogs * numPublicFunctionCalls);
2575
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
2576
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
2577
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
2578
+ const targetLogIndex = randomInt(getPublicLogsPerTx(targetBlock, targetTxIndex));
1068
2579
  const targetContractAddress =
1069
- blocks[targetBlockIndex].block.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
2580
+ targetBlock.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
1070
2581
 
1071
2582
  const response = await store.getPublicLogs({ contractAddress: targetContractAddress });
1072
2583
 
@@ -1079,11 +2590,19 @@ export function describeArchiverDataStore(
1079
2590
 
1080
2591
  it('"afterLog" filter param is respected', async () => {
1081
2592
  // Get a random log as reference
1082
- const targetBlockIndex = randomInt(numBlocks);
1083
- const targetTxIndex = randomInt(txsPerBlock);
1084
- const targetLogIndex = randomInt(numPublicLogs);
1085
-
1086
- const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
2593
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
2594
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
2595
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
2596
+ const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
2597
+ const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
2598
+ const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
2599
+
2600
+ const afterLog = new LogId(
2601
+ BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM),
2602
+ targetBlockHash,
2603
+ targetTxIndex,
2604
+ targetLogIndex,
2605
+ );
1087
2606
 
1088
2607
  const response = await store.getPublicLogs({ afterLog });
1089
2608
  const logs = response.logs;
@@ -1105,52 +2624,77 @@ export function describeArchiverDataStore(
1105
2624
  it('"txHash" filter param is ignored when "afterLog" is set', async () => {
1106
2625
  // Get random txHash
1107
2626
  const txHash = TxHash.random();
1108
- const afterLog = new LogId(1, 0, 0);
2627
+ const afterLog = new LogId(BlockNumber(1), L2BlockHash.random(), 0, 0);
1109
2628
 
1110
2629
  const response = await store.getPublicLogs({ txHash, afterLog });
1111
2630
  expect(response.logs.length).toBeGreaterThan(1);
1112
2631
  });
1113
2632
 
1114
2633
  it('intersecting works', async () => {
1115
- let logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: -5 })).logs;
2634
+ let logs = (await store.getPublicLogs({ fromBlock: -10 as BlockNumber, toBlock: -5 as BlockNumber })).logs;
1116
2635
  expect(logs.length).toBe(0);
1117
2636
 
1118
2637
  // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive
1119
- logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: 5 })).logs;
2638
+ logs = (await store.getPublicLogs({ fromBlock: -10 as BlockNumber, toBlock: BlockNumber(5) })).logs;
1120
2639
  let blockNumbers = new Set(logs.map(log => log.id.blockNumber));
1121
2640
  expect(blockNumbers).toEqual(new Set([1, 2, 3, 4]));
1122
2641
 
1123
2642
  // "toBlock" should be exclusive
1124
- logs = (await store.getPublicLogs({ fromBlock: 1, toBlock: 1 })).logs;
2643
+ logs = (await store.getPublicLogs({ fromBlock: BlockNumber(1), toBlock: BlockNumber(1) })).logs;
1125
2644
  expect(logs.length).toBe(0);
1126
2645
 
1127
- logs = (await store.getPublicLogs({ fromBlock: 10, toBlock: 5 })).logs;
2646
+ logs = (await store.getPublicLogs({ fromBlock: BlockNumber(10), toBlock: BlockNumber(5) })).logs;
1128
2647
  expect(logs.length).toBe(0);
1129
2648
 
1130
2649
  // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned
1131
- logs = (await store.getPublicLogs({ fromBlock: -100, toBlock: +100 })).logs;
2650
+ logs = (await store.getPublicLogs({ fromBlock: -100 as BlockNumber, toBlock: +100 })).logs;
1132
2651
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
1133
- expect(blockNumbers.size).toBe(numBlocks);
2652
+ expect(blockNumbers.size).toBe(numBlocksForPublicLogs);
1134
2653
 
1135
2654
  // intersecting with "afterLog" works
1136
- logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs;
2655
+ logs = (
2656
+ await store.getPublicLogs({
2657
+ fromBlock: BlockNumber(2),
2658
+ toBlock: BlockNumber(5),
2659
+ afterLog: new LogId(BlockNumber(4), L2BlockHash.random(), 0, 0),
2660
+ })
2661
+ ).logs;
1137
2662
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
1138
2663
  expect(blockNumbers).toEqual(new Set([4]));
1139
2664
 
1140
- logs = (await store.getPublicLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs;
2665
+ logs = (
2666
+ await store.getPublicLogs({
2667
+ toBlock: BlockNumber(5),
2668
+ afterLog: new LogId(BlockNumber(5), L2BlockHash.random(), 1, 0),
2669
+ })
2670
+ ).logs;
1141
2671
  expect(logs.length).toBe(0);
1142
2672
 
1143
- logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })).logs;
2673
+ logs = (
2674
+ await store.getPublicLogs({
2675
+ fromBlock: BlockNumber(2),
2676
+ toBlock: BlockNumber(5),
2677
+ afterLog: new LogId(BlockNumber(100), L2BlockHash.random(), 0, 0),
2678
+ })
2679
+ ).logs;
1144
2680
  expect(logs.length).toBe(0);
1145
2681
  });
1146
2682
 
1147
2683
  it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async () => {
1148
2684
  // Get a random log as reference
1149
- const targetBlockIndex = randomInt(numBlocks);
1150
- const targetTxIndex = randomInt(txsPerBlock);
1151
- const targetLogIndex = randomInt(numPublicLogs);
1152
-
1153
- const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
2685
+ const targetBlockIndex = randomInt(numBlocksForPublicLogs);
2686
+ const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
2687
+ const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
2688
+ const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
2689
+ const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
2690
+ const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
2691
+
2692
+ const afterLog = new LogId(
2693
+ BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM),
2694
+ targetBlockHash,
2695
+ targetTxIndex,
2696
+ targetLogIndex,
2697
+ );
1154
2698
 
1155
2699
  const response = await store.getPublicLogs({ afterLog, fromBlock: afterLog.blockNumber });
1156
2700
  const logs = response.logs;
@@ -1170,6 +2714,40 @@ export function describeArchiverDataStore(
1170
2714
  });
1171
2715
  });
1172
2716
 
2717
+ describe('getContractClassLogs', () => {
2718
+ let targetBlock: L2BlockNew;
2719
+ let expectedContractClassLog: ContractClassLog;
2720
+
2721
+ beforeEach(async () => {
2722
+ await store.addCheckpoints(publishedCheckpoints);
2723
+
2724
+ targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
2725
+ expectedContractClassLog = await ContractClassLog.random();
2726
+ targetBlock.body.txEffects.forEach((txEffect, index) => {
2727
+ txEffect.contractClassLogs = index === 0 ? [expectedContractClassLog] : [];
2728
+ });
2729
+
2730
+ await store.addLogs([targetBlock]);
2731
+ });
2732
+
2733
+ it('returns block hash on contract class log ids', async () => {
2734
+ const result = await store.getContractClassLogs({
2735
+ fromBlock: targetBlock.number,
2736
+ toBlock: targetBlock.number + 1,
2737
+ });
2738
+
2739
+ expect(result.maxLogsHit).toBeFalsy();
2740
+ expect(result.logs).toHaveLength(1);
2741
+
2742
+ const [{ id, log }] = result.logs;
2743
+ const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
2744
+
2745
+ expect(id.blockHash.equals(expectedBlockHash)).toBe(true);
2746
+ expect(id.blockNumber).toEqual(targetBlock.number);
2747
+ expect(log).toEqual(expectedContractClassLog);
2748
+ });
2749
+ });
2750
+
1173
2751
  describe('pendingChainValidationStatus', () => {
1174
2752
  it('should return undefined when no status is set', async () => {
1175
2753
  const status = await store.getPendingChainValidationStatus();
@@ -1177,7 +2755,7 @@ export function describeArchiverDataStore(
1177
2755
  });
1178
2756
 
1179
2757
  it('should store and retrieve a valid validation status', async () => {
1180
- const validStatus: ValidateBlockResult = { valid: true };
2758
+ const validStatus: ValidateCheckpointResult = { valid: true };
1181
2759
 
1182
2760
  await store.setPendingChainValidationStatus(validStatus);
1183
2761
  const retrievedStatus = await store.getPendingChainValidationStatus();
@@ -1186,9 +2764,9 @@ export function describeArchiverDataStore(
1186
2764
  });
1187
2765
 
1188
2766
  it('should store and retrieve an invalid validation status with insufficient attestations', async () => {
1189
- const invalidStatus: ValidateBlockResult = {
2767
+ const invalidStatus: ValidateCheckpointResult = {
1190
2768
  valid: false,
1191
- block: randomBlockInfo(1),
2769
+ checkpoint: randomCheckpointInfo(1),
1192
2770
  committee: [EthAddress.random(), EthAddress.random()],
1193
2771
  epoch: EpochNumber(123),
1194
2772
  seed: 456n,
@@ -1204,9 +2782,9 @@ export function describeArchiverDataStore(
1204
2782
  });
1205
2783
 
1206
2784
  it('should store and retrieve an invalid validation status with invalid attestation', async () => {
1207
- const invalidStatus: ValidateBlockResult = {
2785
+ const invalidStatus: ValidateCheckpointResult = {
1208
2786
  valid: false,
1209
- block: randomBlockInfo(2),
2787
+ checkpoint: randomCheckpointInfo(2),
1210
2788
  committee: [EthAddress.random()],
1211
2789
  attestors: [EthAddress.random()],
1212
2790
  epoch: EpochNumber(789),
@@ -1223,10 +2801,10 @@ export function describeArchiverDataStore(
1223
2801
  });
1224
2802
 
1225
2803
  it('should overwrite existing status when setting a new one', async () => {
1226
- const firstStatus: ValidateBlockResult = { valid: true };
1227
- const secondStatus: ValidateBlockResult = {
2804
+ const firstStatus: ValidateCheckpointResult = { valid: true };
2805
+ const secondStatus: ValidateCheckpointResult = {
1228
2806
  valid: false,
1229
- block: randomBlockInfo(3),
2807
+ checkpoint: randomCheckpointInfo(3),
1230
2808
  committee: [EthAddress.random()],
1231
2809
  epoch: EpochNumber(999),
1232
2810
  seed: 888n,
@@ -1243,9 +2821,9 @@ export function describeArchiverDataStore(
1243
2821
  });
1244
2822
 
1245
2823
  it('should handle empty committee and attestations arrays', async () => {
1246
- const statusWithEmptyArrays: ValidateBlockResult = {
2824
+ const statusWithEmptyArrays: ValidateCheckpointResult = {
1247
2825
  valid: false,
1248
- block: randomBlockInfo(4),
2826
+ checkpoint: randomCheckpointInfo(4),
1249
2827
  committee: [],
1250
2828
  epoch: EpochNumber(0),
1251
2829
  seed: 0n,