@aztec/archiver 0.0.1-commit.f2ce05ee → 0.0.1-commit.f5d02921e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/dest/archiver.d.ts +9 -6
  2. package/dest/archiver.d.ts.map +1 -1
  3. package/dest/archiver.js +76 -111
  4. package/dest/config.d.ts +3 -3
  5. package/dest/config.d.ts.map +1 -1
  6. package/dest/config.js +2 -1
  7. package/dest/errors.d.ts +34 -10
  8. package/dest/errors.d.ts.map +1 -1
  9. package/dest/errors.js +45 -16
  10. package/dest/factory.d.ts +4 -5
  11. package/dest/factory.d.ts.map +1 -1
  12. package/dest/factory.js +29 -26
  13. package/dest/index.d.ts +2 -1
  14. package/dest/index.d.ts.map +1 -1
  15. package/dest/index.js +1 -0
  16. package/dest/l1/bin/retrieve-calldata.js +32 -28
  17. package/dest/l1/calldata_retriever.d.ts +73 -50
  18. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  19. package/dest/l1/calldata_retriever.js +191 -259
  20. package/dest/l1/data_retrieval.d.ts +9 -9
  21. package/dest/l1/data_retrieval.d.ts.map +1 -1
  22. package/dest/l1/data_retrieval.js +21 -19
  23. package/dest/l1/spire_proposer.d.ts +5 -5
  24. package/dest/l1/spire_proposer.d.ts.map +1 -1
  25. package/dest/l1/spire_proposer.js +9 -17
  26. package/dest/modules/data_source_base.d.ts +14 -7
  27. package/dest/modules/data_source_base.d.ts.map +1 -1
  28. package/dest/modules/data_source_base.js +39 -77
  29. package/dest/modules/data_store_updater.d.ts +25 -12
  30. package/dest/modules/data_store_updater.d.ts.map +1 -1
  31. package/dest/modules/data_store_updater.js +125 -94
  32. package/dest/modules/instrumentation.d.ts +15 -2
  33. package/dest/modules/instrumentation.d.ts.map +1 -1
  34. package/dest/modules/instrumentation.js +19 -2
  35. package/dest/modules/l1_synchronizer.d.ts +5 -8
  36. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  37. package/dest/modules/l1_synchronizer.js +73 -33
  38. package/dest/modules/validation.d.ts +1 -1
  39. package/dest/modules/validation.d.ts.map +1 -1
  40. package/dest/modules/validation.js +2 -2
  41. package/dest/store/block_store.d.ts +65 -28
  42. package/dest/store/block_store.d.ts.map +1 -1
  43. package/dest/store/block_store.js +311 -134
  44. package/dest/store/contract_class_store.d.ts +2 -3
  45. package/dest/store/contract_class_store.d.ts.map +1 -1
  46. package/dest/store/contract_class_store.js +7 -67
  47. package/dest/store/contract_instance_store.d.ts +1 -1
  48. package/dest/store/contract_instance_store.d.ts.map +1 -1
  49. package/dest/store/contract_instance_store.js +6 -2
  50. package/dest/store/kv_archiver_store.d.ts +62 -21
  51. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  52. package/dest/store/kv_archiver_store.js +75 -22
  53. package/dest/store/l2_tips_cache.d.ts +20 -0
  54. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  55. package/dest/store/l2_tips_cache.js +109 -0
  56. package/dest/store/log_store.d.ts +6 -3
  57. package/dest/store/log_store.d.ts.map +1 -1
  58. package/dest/store/log_store.js +93 -16
  59. package/dest/store/message_store.d.ts +5 -1
  60. package/dest/store/message_store.d.ts.map +1 -1
  61. package/dest/store/message_store.js +14 -1
  62. package/dest/test/fake_l1_state.d.ts +13 -1
  63. package/dest/test/fake_l1_state.d.ts.map +1 -1
  64. package/dest/test/fake_l1_state.js +95 -23
  65. package/dest/test/mock_archiver.d.ts +1 -1
  66. package/dest/test/mock_archiver.d.ts.map +1 -1
  67. package/dest/test/mock_archiver.js +3 -2
  68. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  69. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  70. package/dest/test/mock_l1_to_l2_message_source.js +2 -1
  71. package/dest/test/mock_l2_block_source.d.ts +26 -5
  72. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  73. package/dest/test/mock_l2_block_source.js +160 -89
  74. package/dest/test/mock_structs.d.ts +4 -1
  75. package/dest/test/mock_structs.d.ts.map +1 -1
  76. package/dest/test/mock_structs.js +13 -1
  77. package/dest/test/noop_l1_archiver.d.ts +4 -1
  78. package/dest/test/noop_l1_archiver.d.ts.map +1 -1
  79. package/dest/test/noop_l1_archiver.js +5 -1
  80. package/package.json +13 -13
  81. package/src/archiver.ts +93 -132
  82. package/src/config.ts +8 -1
  83. package/src/errors.ts +70 -26
  84. package/src/factory.ts +30 -16
  85. package/src/index.ts +1 -0
  86. package/src/l1/README.md +25 -68
  87. package/src/l1/bin/retrieve-calldata.ts +40 -27
  88. package/src/l1/calldata_retriever.ts +250 -379
  89. package/src/l1/data_retrieval.ts +23 -25
  90. package/src/l1/spire_proposer.ts +7 -15
  91. package/src/modules/data_source_base.ts +78 -98
  92. package/src/modules/data_store_updater.ts +138 -124
  93. package/src/modules/instrumentation.ts +29 -2
  94. package/src/modules/l1_synchronizer.ts +86 -43
  95. package/src/modules/validation.ts +2 -2
  96. package/src/store/block_store.ts +393 -170
  97. package/src/store/contract_class_store.ts +8 -106
  98. package/src/store/contract_instance_store.ts +8 -5
  99. package/src/store/kv_archiver_store.ts +117 -36
  100. package/src/store/l2_tips_cache.ts +128 -0
  101. package/src/store/log_store.ts +126 -27
  102. package/src/store/message_store.ts +20 -1
  103. package/src/test/fake_l1_state.ts +125 -26
  104. package/src/test/mock_archiver.ts +3 -2
  105. package/src/test/mock_l1_to_l2_message_source.ts +1 -0
  106. package/src/test/mock_l2_block_source.ts +209 -82
  107. package/src/test/mock_structs.ts +20 -6
  108. package/src/test/noop_l1_archiver.ts +7 -1
@@ -1,6 +1,6 @@
1
1
  import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
2
2
  import { BlockNumber } from '@aztec/foundation/branded-types';
3
- import { filterAsync } from '@aztec/foundation/collection';
3
+ import { compactArray, filterAsync } from '@aztec/foundation/collection';
4
4
  import { Fr } from '@aztec/foundation/curves/bn254';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
6
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
@@ -22,6 +22,7 @@ import {
22
22
  } from '@aztec/stdlib/logs';
23
23
  import { TxHash } from '@aztec/stdlib/tx';
24
24
 
25
+ import { OutOfOrderLogInsertionError } from '../errors.js';
25
26
  import type { BlockStore } from './block_store.js';
26
27
 
27
28
  /**
@@ -165,10 +166,21 @@ export class LogStore {
165
166
 
166
167
  for (const taggedLogBuffer of currentPrivateTaggedLogs) {
167
168
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
168
- privateTaggedLogs.set(
169
- taggedLogBuffer.tag,
170
- taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
171
- );
169
+ const newLogs = privateTaggedLogs.get(taggedLogBuffer.tag)!;
170
+ if (newLogs.length === 0) {
171
+ continue;
172
+ }
173
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
174
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
175
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
176
+ throw new OutOfOrderLogInsertionError(
177
+ 'private',
178
+ taggedLogBuffer.tag,
179
+ lastExisting.blockNumber,
180
+ firstNew.blockNumber,
181
+ );
182
+ }
183
+ privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
172
184
  }
173
185
  }
174
186
 
@@ -200,10 +212,21 @@ export class LogStore {
200
212
 
201
213
  for (const taggedLogBuffer of currentPublicTaggedLogs) {
202
214
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
203
- publicTaggedLogs.set(
204
- taggedLogBuffer.tag,
205
- taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
206
- );
215
+ const newLogs = publicTaggedLogs.get(taggedLogBuffer.tag)!;
216
+ if (newLogs.length === 0) {
217
+ continue;
218
+ }
219
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
220
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
221
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
222
+ throw new OutOfOrderLogInsertionError(
223
+ 'public',
224
+ taggedLogBuffer.tag,
225
+ lastExisting.blockNumber,
226
+ firstNew.blockNumber,
227
+ );
228
+ }
229
+ publicTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
207
230
  }
208
231
  }
209
232
 
@@ -290,18 +313,49 @@ export class LogStore {
290
313
 
291
314
  deleteLogs(blocks: L2Block[]): Promise<boolean> {
292
315
  return this.db.transactionAsync(async () => {
293
- await Promise.all(
294
- blocks.map(async block => {
295
- // Delete private logs
296
- const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
297
- await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));
298
-
299
- // Delete public logs
300
- const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
301
- await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
302
- }),
316
+ const blockNumbers = new Set(blocks.map(block => block.number));
317
+ const firstBlockToDelete = Math.min(...blockNumbers);
318
+
319
+ // Collect all unique private tags across all blocks being deleted
320
+ const allPrivateTags = new Set(
321
+ compactArray(await Promise.all(blocks.map(block => this.#privateLogKeysByBlock.getAsync(block.number)))).flat(),
303
322
  );
304
323
 
324
+ // Trim private logs: for each tag, delete all instances including and after the first block being deleted.
325
+ // This hinges on the invariant that logs for a given tag are always inserted in order of block number, which is enforced in #addPrivateLogs.
326
+ for (const tag of allPrivateTags) {
327
+ const existing = await this.#privateLogsByTag.getAsync(tag);
328
+ if (existing === undefined || existing.length === 0) {
329
+ continue;
330
+ }
331
+ const lastIndexToKeep = existing.findLastIndex(
332
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
333
+ );
334
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
335
+ await (remaining.length > 0 ? this.#privateLogsByTag.set(tag, remaining) : this.#privateLogsByTag.delete(tag));
336
+ }
337
+
338
+ // Collect all unique public keys across all blocks being deleted
339
+ const allPublicKeys = new Set(
340
+ compactArray(await Promise.all(blocks.map(block => this.#publicLogKeysByBlock.getAsync(block.number)))).flat(),
341
+ );
342
+
343
+ // And do the same as we did with private logs
344
+ for (const key of allPublicKeys) {
345
+ const existing = await this.#publicLogsByContractAndTag.getAsync(key);
346
+ if (existing === undefined || existing.length === 0) {
347
+ continue;
348
+ }
349
+ const lastIndexToKeep = existing.findLastIndex(
350
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
351
+ );
352
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
353
+ await (remaining.length > 0
354
+ ? this.#publicLogsByContractAndTag.set(key, remaining)
355
+ : this.#publicLogsByContractAndTag.delete(key));
356
+ }
357
+
358
+ // After trimming the tagged logs, we can delete the block-level keys that track which tags are in which blocks.
305
359
  await Promise.all(
306
360
  blocks.map(block =>
307
361
  Promise.all([
@@ -322,17 +376,30 @@ export class LogStore {
322
376
  * array implies no logs match that tag.
323
377
  * @param tags - The tags to search for.
324
378
  * @param page - The page number (0-indexed) for pagination.
379
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
325
380
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
326
381
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
327
382
  */
328
- async getPrivateLogsByTags(tags: SiloedTag[], page: number = 0): Promise<TxScopedL2Log[][]> {
383
+ async getPrivateLogsByTags(
384
+ tags: SiloedTag[],
385
+ page: number = 0,
386
+ upToBlockNumber?: BlockNumber,
387
+ ): Promise<TxScopedL2Log[][]> {
329
388
  const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
389
+
330
390
  const start = page * MAX_LOGS_PER_TAG;
331
391
  const end = start + MAX_LOGS_PER_TAG;
332
392
 
333
- return logs.map(
334
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
335
- );
393
+ return logs.map(logBuffers => {
394
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
395
+ if (upToBlockNumber !== undefined) {
396
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
397
+ if (cutoff !== -1) {
398
+ return deserialized.slice(0, cutoff);
399
+ }
400
+ }
401
+ return deserialized;
402
+ });
336
403
  }
337
404
 
338
405
  /**
@@ -341,6 +408,7 @@ export class LogStore {
341
408
  * @param contractAddress - The contract address to search logs for.
342
409
  * @param tags - The tags to search for.
343
410
  * @param page - The page number (0-indexed) for pagination.
411
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
344
412
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
345
413
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
346
414
  */
@@ -348,6 +416,7 @@ export class LogStore {
348
416
  contractAddress: AztecAddress,
349
417
  tags: Tag[],
350
418
  page: number = 0,
419
+ upToBlockNumber?: BlockNumber,
351
420
  ): Promise<TxScopedL2Log[][]> {
352
421
  const logs = await Promise.all(
353
422
  tags.map(tag => {
@@ -358,9 +427,16 @@ export class LogStore {
358
427
  const start = page * MAX_LOGS_PER_TAG;
359
428
  const end = start + MAX_LOGS_PER_TAG;
360
429
 
361
- return logs.map(
362
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
363
- );
430
+ return logs.map(logBuffers => {
431
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
432
+ if (upToBlockNumber !== undefined) {
433
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
434
+ if (cutoff !== -1) {
435
+ return deserialized.slice(0, cutoff);
436
+ }
437
+ }
438
+ return deserialized;
439
+ });
364
440
  }
365
441
 
366
442
  /**
@@ -588,11 +664,24 @@ export class LogStore {
588
664
  txLogs: PublicLog[],
589
665
  filter: LogFilter = {},
590
666
  ): boolean {
667
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
668
+ return false;
669
+ }
670
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
671
+ return false;
672
+ }
673
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
674
+ return false;
675
+ }
676
+
591
677
  let maxLogsHit = false;
592
678
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
593
679
  for (; logIndex < txLogs.length; logIndex++) {
594
680
  const log = txLogs[logIndex];
595
- if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
681
+ if (
682
+ (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) &&
683
+ (!filter.tag || log.fields[0]?.equals(filter.tag))
684
+ ) {
596
685
  results.push(
597
686
  new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
598
687
  );
@@ -616,6 +705,16 @@ export class LogStore {
616
705
  txLogs: ContractClassLog[],
617
706
  filter: LogFilter = {},
618
707
  ): boolean {
708
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
709
+ return false;
710
+ }
711
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
712
+ return false;
713
+ }
714
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
715
+ return false;
716
+ }
717
+
619
718
  let maxLogsHit = false;
620
719
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
621
720
  for (; logIndex < txLogs.length; logIndex++) {
@@ -14,6 +14,7 @@ import {
14
14
  } from '@aztec/kv-store';
15
15
  import { InboxLeaf } from '@aztec/stdlib/messaging';
16
16
 
17
+ import { L1ToL2MessagesNotReadyError } from '../errors.js';
17
18
  import {
18
19
  type InboxMessage,
19
20
  deserializeInboxMessage,
@@ -40,6 +41,8 @@ export class MessageStore {
40
41
  #lastSynchedL1Block: AztecAsyncSingleton<Buffer>;
41
42
  /** Stores total messages stored */
42
43
  #totalMessageCount: AztecAsyncSingleton<bigint>;
44
+ /** Stores the checkpoint number whose message tree is currently being filled on L1. */
45
+ #inboxTreeInProgress: AztecAsyncSingleton<bigint>;
43
46
 
44
47
  #log = createLogger('archiver:message_store');
45
48
 
@@ -48,6 +51,7 @@ export class MessageStore {
48
51
  this.#l1ToL2MessageIndices = db.openMap('archiver_l1_to_l2_message_indices');
49
52
  this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_id');
50
53
  this.#totalMessageCount = db.openSingleton('archiver_l1_to_l2_message_count');
54
+ this.#inboxTreeInProgress = db.openSingleton('archiver_inbox_tree_in_progress');
51
55
  }
52
56
 
53
57
  public async getTotalL1ToL2MessageCount(): Promise<bigint> {
@@ -137,7 +141,7 @@ export class MessageStore {
137
141
  );
138
142
  }
139
143
 
140
- // Check the first message in a block has the correct index.
144
+ // Check the first message in a checkpoint has the correct index.
141
145
  if (
142
146
  (!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) &&
143
147
  message.index !== expectedStart
@@ -185,7 +189,22 @@ export class MessageStore {
185
189
  return msg ? deserializeInboxMessage(msg) : undefined;
186
190
  }
187
191
 
192
+ /** Returns the inbox tree-in-progress checkpoint number from L1, or undefined if not yet set. */
193
+ public getInboxTreeInProgress(): Promise<bigint | undefined> {
194
+ return this.#inboxTreeInProgress.getAsync();
195
+ }
196
+
197
+ /** Persists the inbox tree-in-progress checkpoint number from L1 state. */
198
+ public async setInboxTreeInProgress(value: bigint): Promise<void> {
199
+ await this.#inboxTreeInProgress.set(value);
200
+ }
201
+
188
202
  public async getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
203
+ const treeInProgress = await this.#inboxTreeInProgress.getAsync();
204
+ if (treeInProgress !== undefined && BigInt(checkpointNumber) >= treeInProgress) {
205
+ throw new L1ToL2MessagesNotReadyError(checkpointNumber, treeInProgress);
206
+ }
207
+
189
208
  const messages: Fr[] = [];
190
209
 
191
210
  const [startIndex, endIndex] = InboxLeaf.indexRangeForCheckpoint(checkpointNumber);
@@ -1,5 +1,6 @@
1
1
  import type { BlobClientInterface } from '@aztec/blob-client/client';
2
2
  import { type Blob, getBlobsPerL1Block, getPrefixedEthBlobCommitments } from '@aztec/blob-lib';
3
+ import { INITIAL_CHECKPOINT_NUMBER } from '@aztec/constants';
3
4
  import type { CheckpointProposedLog, InboxContract, MessageSentLog, RollupContract } from '@aztec/ethereum/contracts';
4
5
  import { MULTI_CALL_3_ADDRESS } from '@aztec/ethereum/contracts';
5
6
  import type { ViemPublicClient } from '@aztec/ethereum/types';
@@ -14,6 +15,7 @@ import { CommitteeAttestation, CommitteeAttestationsAndSigners, L2Block } from '
14
15
  import { Checkpoint } from '@aztec/stdlib/checkpoint';
15
16
  import { getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers';
16
17
  import { InboxLeaf } from '@aztec/stdlib/messaging';
18
+ import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p';
17
19
  import {
18
20
  makeAndSignCommitteeAttestationsAndSigners,
19
21
  makeCheckpointAttestationFromCheckpoint,
@@ -22,7 +24,16 @@ import {
22
24
  import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
23
25
 
24
26
  import { type MockProxy, mock } from 'jest-mock-extended';
25
- import { type FormattedBlock, type Transaction, encodeFunctionData, multicall3Abi, toHex } from 'viem';
27
+ import {
28
+ type AbiParameter,
29
+ type FormattedBlock,
30
+ type Transaction,
31
+ encodeAbiParameters,
32
+ encodeFunctionData,
33
+ keccak256,
34
+ multicall3Abi,
35
+ toHex,
36
+ } from 'viem';
26
37
 
27
38
  import { updateRollingHash } from '../structs/inbox_message.js';
28
39
 
@@ -87,6 +98,10 @@ type CheckpointData = {
87
98
  blobHashes: `0x${string}`[];
88
99
  blobs: Blob[];
89
100
  signers: Secp256k1Signer[];
101
+ /** Hash of the packed attestations, matching what the L1 event emits. */
102
+ attestationsHash: Buffer32;
103
+ /** Payload digest, matching what the L1 event emits. */
104
+ payloadDigest: Buffer32;
90
105
  /** If true, archiveAt will ignore it */
91
106
  pruned?: boolean;
92
107
  };
@@ -131,12 +146,17 @@ export class FakeL1State {
131
146
  private provenCheckpointNumber: CheckpointNumber = CheckpointNumber(0);
132
147
  private targetCommitteeSize: number = 0;
133
148
  private version: bigint = 1n;
149
+ private canPruneResult: boolean = false;
134
150
 
135
151
  // Computed from checkpoints based on L1 block visibility
136
152
  private pendingCheckpointNumber: CheckpointNumber = CheckpointNumber(0);
137
153
 
154
+ // The L1 block number reported as "finalized" (defaults to the start block)
155
+ private finalizedL1BlockNumber: bigint;
156
+
138
157
  constructor(private readonly config: FakeL1StateConfig) {
139
158
  this.l1BlockNumber = config.l1StartBlock;
159
+ this.finalizedL1BlockNumber = config.l1StartBlock;
140
160
  this.lastArchive = new AppendOnlyTreeSnapshot(config.genesisArchiveRoot, 1);
141
161
  }
142
162
 
@@ -193,10 +213,10 @@ export class FakeL1State {
193
213
  // Store the messages internally so they match the checkpoint's inHash
194
214
  this.addMessages(checkpointNumber, messagesL1BlockNumber, messages);
195
215
 
196
- // Create the transaction and blobs
197
- const tx = this.makeRollupTx(checkpoint, signers);
198
- const blobHashes = this.makeVersionedBlobHashes(checkpoint);
199
- const blobs = this.makeBlobsFromCheckpoint(checkpoint);
216
+ // Create the transaction, blobs, and event hashes
217
+ const { tx, attestationsHash, payloadDigest } = await this.makeRollupTx(checkpoint, signers);
218
+ const blobHashes = await this.makeVersionedBlobHashes(checkpoint);
219
+ const blobs = await this.makeBlobsFromCheckpoint(checkpoint);
200
220
 
201
221
  // Store the checkpoint data
202
222
  this.checkpoints.push({
@@ -207,6 +227,8 @@ export class FakeL1State {
207
227
  blobHashes,
208
228
  blobs,
209
229
  signers,
230
+ attestationsHash,
231
+ payloadDigest,
210
232
  });
211
233
 
212
234
  // Update last archive for auto-chaining
@@ -266,16 +288,40 @@ export class FakeL1State {
266
288
  this.updatePendingCheckpointNumber();
267
289
  }
268
290
 
291
+ /** Sets the L1 block number that will be reported as "finalized". */
292
+ setFinalizedL1BlockNumber(blockNumber: bigint): void {
293
+ this.finalizedL1BlockNumber = blockNumber;
294
+ }
295
+
269
296
  /** Marks a checkpoint as proven. Updates provenCheckpointNumber. */
270
297
  markCheckpointAsProven(checkpointNumber: CheckpointNumber): void {
271
298
  this.provenCheckpointNumber = checkpointNumber;
272
299
  }
273
300
 
301
+ /**
302
+ * Simulates what `rollup.getProvenCheckpointNumber({ blockNumber: atL1Block })` would return.
303
+ */
304
+ getProvenCheckpointNumberAtL1Block(atL1Block: bigint): CheckpointNumber {
305
+ if (this.provenCheckpointNumber === 0) {
306
+ return CheckpointNumber(0);
307
+ }
308
+ const checkpoint = this.checkpoints.find(cp => cp.checkpointNumber === this.provenCheckpointNumber);
309
+ if (checkpoint && checkpoint.l1BlockNumber <= atL1Block) {
310
+ return this.provenCheckpointNumber;
311
+ }
312
+ return CheckpointNumber(0);
313
+ }
314
+
274
315
  /** Sets the target committee size for attestation validation. */
275
316
  setTargetCommitteeSize(size: number): void {
276
317
  this.targetCommitteeSize = size;
277
318
  }
278
319
 
320
+ /** Sets whether the rollup contract would allow pruning at the next block. */
321
+ setCanPrune(value: boolean): void {
322
+ this.canPruneResult = value;
323
+ }
324
+
279
325
  /**
280
326
  * Removes all entries for a checkpoint number (simulates L1 reorg or prune).
281
327
  * Note: Does NOT remove messages for this checkpoint (use numL1ToL2Messages: 0 when re-adding).
@@ -384,6 +430,13 @@ export class FakeL1State {
384
430
  });
385
431
  });
386
432
 
433
+ mockRollup.getProvenCheckpointNumber.mockImplementation((options?: { blockNumber?: bigint }) => {
434
+ const atBlock = options?.blockNumber ?? this.l1BlockNumber;
435
+ return Promise.resolve(this.getProvenCheckpointNumberAtL1Block(atBlock));
436
+ });
437
+
438
+ mockRollup.canPruneAtTime.mockImplementation(() => Promise.resolve(this.canPruneResult));
439
+
387
440
  // Mock the wrapper method for fetching checkpoint events
388
441
  mockRollup.getCheckpointProposedEvents.mockImplementation((fromBlock: bigint, toBlock: bigint) =>
389
442
  Promise.resolve(this.getCheckpointProposedLogs(fromBlock, toBlock)),
@@ -398,13 +451,22 @@ export class FakeL1State {
398
451
  createMockInboxContract(_publicClient: MockProxy<ViemPublicClient>): MockProxy<InboxContract> {
399
452
  const mockInbox = mock<InboxContract>();
400
453
 
401
- mockInbox.getState.mockImplementation(() =>
402
- Promise.resolve({
454
+ mockInbox.getState.mockImplementation(() => {
455
+ // treeInProgress must be > any sealed checkpoint. On L1, a checkpoint can only be proposed
456
+ // after its messages are sealed, so treeInProgress > checkpointNumber for all published checkpoints.
457
+ const maxFromMessages =
458
+ this.messages.length > 0 ? Math.max(...this.messages.map(m => Number(m.checkpointNumber))) + 1 : 0;
459
+ const maxFromCheckpoints =
460
+ this.checkpoints.length > 0
461
+ ? Math.max(...this.checkpoints.filter(cp => !cp.pruned).map(cp => Number(cp.checkpointNumber))) + 1
462
+ : 0;
463
+ const treeInProgress = Math.max(maxFromMessages, maxFromCheckpoints, INITIAL_CHECKPOINT_NUMBER);
464
+ return Promise.resolve({
403
465
  messagesRollingHash: this.messagesRollingHash,
404
466
  totalMessagesInserted: BigInt(this.messages.length),
405
- treeInProgress: 0n,
406
- }),
407
- );
467
+ treeInProgress: BigInt(treeInProgress),
468
+ });
469
+ });
408
470
 
409
471
  // Mock the wrapper methods for fetching message events
410
472
  mockInbox.getMessageSentEvents.mockImplementation((fromBlock: bigint, toBlock: bigint) =>
@@ -425,10 +487,13 @@ export class FakeL1State {
425
487
  publicClient.getChainId.mockResolvedValue(1);
426
488
  publicClient.getBlockNumber.mockImplementation(() => Promise.resolve(this.l1BlockNumber));
427
489
 
428
- // Use async function pattern that existing test uses for getBlock
429
-
430
- publicClient.getBlock.mockImplementation((async (args: { blockNumber?: bigint } = {}) => {
431
- const blockNum = args.blockNumber ?? (await publicClient.getBlockNumber());
490
+ publicClient.getBlock.mockImplementation((async (args: { blockNumber?: bigint; blockTag?: string } = {}) => {
491
+ let blockNum: bigint;
492
+ if (args.blockTag === 'finalized') {
493
+ blockNum = this.finalizedL1BlockNumber;
494
+ } else {
495
+ blockNum = args.blockNumber ?? (await publicClient.getBlockNumber());
496
+ }
432
497
  return {
433
498
  number: blockNum,
434
499
  timestamp: BigInt(blockNum) * BigInt(this.config.ethereumSlotDuration) + this.config.l1GenesisTime,
@@ -502,10 +567,8 @@ export class FakeL1State {
502
567
  checkpointNumber: cpData.checkpointNumber,
503
568
  archive: cpData.checkpoint.archive.root,
504
569
  versionedBlobHashes: cpData.blobHashes.map(h => Buffer.from(h.slice(2), 'hex')),
505
- // These are intentionally undefined to skip hash validation in the archiver
506
- // (validation is skipped when these fields are falsy)
507
- payloadDigest: undefined,
508
- attestationsHash: undefined,
570
+ attestationsHash: cpData.attestationsHash,
571
+ payloadDigest: cpData.payloadDigest,
509
572
  },
510
573
  }));
511
574
  }
@@ -531,14 +594,17 @@ export class FakeL1State {
531
594
  }));
532
595
  }
533
596
 
534
- private makeRollupTx(checkpoint: Checkpoint, signers: Secp256k1Signer[]): Transaction {
597
+ private async makeRollupTx(
598
+ checkpoint: Checkpoint,
599
+ signers: Secp256k1Signer[],
600
+ ): Promise<{ tx: Transaction; attestationsHash: Buffer32; payloadDigest: Buffer32 }> {
535
601
  const attestations = signers
536
602
  .map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer))
537
603
  .map(attestation => CommitteeAttestation.fromSignature(attestation.signature))
538
604
  .map(committeeAttestation => committeeAttestation.toViem());
539
605
 
540
606
  const header = checkpoint.header.toViem();
541
- const blobInput = getPrefixedEthBlobCommitments(getBlobsPerL1Block(checkpoint.toBlobFields()));
607
+ const blobInput = getPrefixedEthBlobCommitments(await getBlobsPerL1Block(checkpoint.toBlobFields()));
542
608
  const archive = toHex(checkpoint.archive.root.toBuffer());
543
609
  const attestationsAndSigners = new CommitteeAttestationsAndSigners(
544
610
  attestations.map(attestation => CommitteeAttestation.fromViem(attestation)),
@@ -549,6 +615,8 @@ export class FakeL1State {
549
615
  signers[0],
550
616
  );
551
617
 
618
+ const packedAttestations = attestationsAndSigners.getPackedAttestations();
619
+
552
620
  const rollupInput = encodeFunctionData({
553
621
  abi: RollupAbi,
554
622
  functionName: 'propose',
@@ -558,7 +626,7 @@ export class FakeL1State {
558
626
  archive,
559
627
  oracleInput: { feeAssetPriceModifier: 0n },
560
628
  },
561
- attestationsAndSigners.getPackedAttestations(),
629
+ packedAttestations,
562
630
  attestationsAndSigners.getSigners().map(signer => signer.toString()),
563
631
  attestationsAndSignersSignature.toViemSignature(),
564
632
  blobInput,
@@ -579,21 +647,52 @@ export class FakeL1State {
579
647
  ],
580
648
  });
581
649
 
582
- return {
650
+ // Compute attestationsHash (same logic as CalldataRetriever)
651
+ const attestationsHash = Buffer32.fromString(
652
+ keccak256(encodeAbiParameters([this.getCommitteeAttestationsStructDef()], [packedAttestations])),
653
+ );
654
+
655
+ // Compute payloadDigest (same logic as CalldataRetriever)
656
+ const consensusPayload = ConsensusPayload.fromCheckpoint(checkpoint);
657
+ const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation);
658
+ const payloadDigest = Buffer32.fromString(keccak256(payloadToSign));
659
+
660
+ const tx = {
583
661
  input: multiCallInput,
584
662
  hash: archive,
585
663
  blockHash: archive,
586
664
  to: MULTI_CALL_3_ADDRESS as `0x${string}`,
587
665
  } as Transaction<bigint, number>;
666
+
667
+ return { tx, attestationsHash, payloadDigest };
668
+ }
669
+
670
+ /** Extracts the CommitteeAttestations struct definition from RollupAbi for hash computation. */
671
+ private getCommitteeAttestationsStructDef(): AbiParameter {
672
+ const proposeFunction = RollupAbi.find(item => item.type === 'function' && item.name === 'propose') as
673
+ | { type: 'function'; name: string; inputs: readonly AbiParameter[] }
674
+ | undefined;
675
+
676
+ if (!proposeFunction) {
677
+ throw new Error('propose function not found in RollupAbi');
678
+ }
679
+
680
+ const attestationsParam = proposeFunction.inputs.find(param => param.name === '_attestations');
681
+ if (!attestationsParam) {
682
+ throw new Error('_attestations parameter not found in propose function');
683
+ }
684
+
685
+ const tupleParam = attestationsParam as unknown as { type: 'tuple'; components?: readonly AbiParameter[] };
686
+ return { type: 'tuple', components: tupleParam.components || [] } as AbiParameter;
588
687
  }
589
688
 
590
- private makeVersionedBlobHashes(checkpoint: Checkpoint): `0x${string}`[] {
591
- return getBlobsPerL1Block(checkpoint.toBlobFields()).map(
689
+ private async makeVersionedBlobHashes(checkpoint: Checkpoint): Promise<`0x${string}`[]> {
690
+ return (await getBlobsPerL1Block(checkpoint.toBlobFields())).map(
592
691
  b => `0x${b.getEthVersionedBlobHash().toString('hex')}` as `0x${string}`,
593
692
  );
594
693
  }
595
694
 
596
- private makeBlobsFromCheckpoint(checkpoint: Checkpoint): Blob[] {
597
- return getBlobsPerL1Block(checkpoint.toBlobFields());
695
+ private async makeBlobsFromCheckpoint(checkpoint: Checkpoint): Promise<Blob[]> {
696
+ return await getBlobsPerL1Block(checkpoint.toBlobFields());
598
697
  }
599
698
  }
@@ -56,8 +56,9 @@ export class MockPrefilledArchiver extends MockArchiver {
56
56
  }
57
57
 
58
58
  const fromBlock = this.l2Blocks.length;
59
- // TODO: Add L2 blocks and checkpoints separately once archiver has the apis for that.
60
- this.addProposedBlocks(this.prefilled.slice(fromBlock, fromBlock + numBlocks).flatMap(c => c.blocks));
59
+ const checkpointsToAdd = this.prefilled.slice(fromBlock, fromBlock + numBlocks);
60
+ this.addProposedBlocks(checkpointsToAdd.flatMap(c => c.blocks));
61
+ this.checkpointList.push(...checkpointsToAdd);
61
62
  return Promise.resolve();
62
63
  }
63
64
  }
@@ -44,6 +44,7 @@ export class MockL1ToL2MessageSource implements L1ToL2MessageSource {
44
44
  checkpointed: tip,
45
45
  proven: tip,
46
46
  finalized: tip,
47
+ proposedCheckpoint: tip,
47
48
  });
48
49
  }
49
50
  }