@aztec/archiver 0.0.1-commit.e2b2873ed → 0.0.1-commit.e304674f1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/README.md +12 -6
  2. package/dest/archiver.d.ts +11 -8
  3. package/dest/archiver.d.ts.map +1 -1
  4. package/dest/archiver.js +79 -114
  5. package/dest/config.d.ts +3 -3
  6. package/dest/config.d.ts.map +1 -1
  7. package/dest/config.js +2 -1
  8. package/dest/errors.d.ts +34 -10
  9. package/dest/errors.d.ts.map +1 -1
  10. package/dest/errors.js +45 -16
  11. package/dest/factory.d.ts +4 -5
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +29 -26
  14. package/dest/index.d.ts +2 -1
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +1 -0
  17. package/dest/l1/bin/retrieve-calldata.js +32 -28
  18. package/dest/l1/calldata_retriever.d.ts +73 -50
  19. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  20. package/dest/l1/calldata_retriever.js +191 -259
  21. package/dest/l1/data_retrieval.d.ts +11 -11
  22. package/dest/l1/data_retrieval.d.ts.map +1 -1
  23. package/dest/l1/data_retrieval.js +35 -34
  24. package/dest/l1/spire_proposer.d.ts +5 -5
  25. package/dest/l1/spire_proposer.d.ts.map +1 -1
  26. package/dest/l1/spire_proposer.js +9 -17
  27. package/dest/modules/data_source_base.d.ts +14 -7
  28. package/dest/modules/data_source_base.d.ts.map +1 -1
  29. package/dest/modules/data_source_base.js +39 -77
  30. package/dest/modules/data_store_updater.d.ts +25 -12
  31. package/dest/modules/data_store_updater.d.ts.map +1 -1
  32. package/dest/modules/data_store_updater.js +125 -94
  33. package/dest/modules/instrumentation.d.ts +15 -2
  34. package/dest/modules/instrumentation.d.ts.map +1 -1
  35. package/dest/modules/instrumentation.js +19 -2
  36. package/dest/modules/l1_synchronizer.d.ts +7 -9
  37. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  38. package/dest/modules/l1_synchronizer.js +176 -136
  39. package/dest/modules/validation.d.ts +1 -1
  40. package/dest/modules/validation.d.ts.map +1 -1
  41. package/dest/modules/validation.js +2 -2
  42. package/dest/store/block_store.d.ts +66 -28
  43. package/dest/store/block_store.d.ts.map +1 -1
  44. package/dest/store/block_store.js +356 -135
  45. package/dest/store/contract_class_store.d.ts +2 -3
  46. package/dest/store/contract_class_store.d.ts.map +1 -1
  47. package/dest/store/contract_class_store.js +7 -67
  48. package/dest/store/contract_instance_store.d.ts +1 -1
  49. package/dest/store/contract_instance_store.d.ts.map +1 -1
  50. package/dest/store/contract_instance_store.js +6 -2
  51. package/dest/store/kv_archiver_store.d.ts +61 -24
  52. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  53. package/dest/store/kv_archiver_store.js +75 -27
  54. package/dest/store/l2_tips_cache.d.ts +20 -0
  55. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  56. package/dest/store/l2_tips_cache.js +109 -0
  57. package/dest/store/log_store.d.ts +6 -3
  58. package/dest/store/log_store.d.ts.map +1 -1
  59. package/dest/store/log_store.js +93 -16
  60. package/dest/store/message_store.d.ts +5 -1
  61. package/dest/store/message_store.d.ts.map +1 -1
  62. package/dest/store/message_store.js +21 -9
  63. package/dest/test/fake_l1_state.d.ts +21 -1
  64. package/dest/test/fake_l1_state.d.ts.map +1 -1
  65. package/dest/test/fake_l1_state.js +133 -26
  66. package/dest/test/mock_archiver.d.ts +1 -1
  67. package/dest/test/mock_archiver.d.ts.map +1 -1
  68. package/dest/test/mock_archiver.js +3 -2
  69. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  70. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  71. package/dest/test/mock_l1_to_l2_message_source.js +2 -1
  72. package/dest/test/mock_l2_block_source.d.ts +26 -5
  73. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  74. package/dest/test/mock_l2_block_source.js +160 -89
  75. package/dest/test/mock_structs.d.ts +4 -1
  76. package/dest/test/mock_structs.d.ts.map +1 -1
  77. package/dest/test/mock_structs.js +13 -1
  78. package/dest/test/noop_l1_archiver.d.ts +4 -1
  79. package/dest/test/noop_l1_archiver.d.ts.map +1 -1
  80. package/dest/test/noop_l1_archiver.js +5 -2
  81. package/package.json +13 -13
  82. package/src/archiver.ts +101 -138
  83. package/src/config.ts +8 -1
  84. package/src/errors.ts +70 -26
  85. package/src/factory.ts +30 -16
  86. package/src/index.ts +1 -0
  87. package/src/l1/README.md +25 -68
  88. package/src/l1/bin/retrieve-calldata.ts +40 -27
  89. package/src/l1/calldata_retriever.ts +250 -379
  90. package/src/l1/data_retrieval.ts +31 -37
  91. package/src/l1/spire_proposer.ts +7 -15
  92. package/src/modules/data_source_base.ts +78 -98
  93. package/src/modules/data_store_updater.ts +138 -124
  94. package/src/modules/instrumentation.ts +29 -2
  95. package/src/modules/l1_synchronizer.ts +196 -168
  96. package/src/modules/validation.ts +2 -2
  97. package/src/store/block_store.ts +451 -172
  98. package/src/store/contract_class_store.ts +8 -106
  99. package/src/store/contract_instance_store.ts +8 -5
  100. package/src/store/kv_archiver_store.ts +115 -41
  101. package/src/store/l2_tips_cache.ts +128 -0
  102. package/src/store/log_store.ts +126 -27
  103. package/src/store/message_store.ts +27 -10
  104. package/src/structs/inbox_message.ts +1 -1
  105. package/src/test/fake_l1_state.ts +178 -30
  106. package/src/test/mock_archiver.ts +3 -2
  107. package/src/test/mock_l1_to_l2_message_source.ts +1 -0
  108. package/src/test/mock_l2_block_source.ts +209 -82
  109. package/src/test/mock_structs.ts +20 -6
  110. package/src/test/noop_l1_archiver.ts +7 -2
package/src/archiver.ts CHANGED
@@ -1,5 +1,4 @@
1
1
  import type { BlobClientInterface } from '@aztec/blob-client/client';
2
- import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
3
2
  import { EpochCache } from '@aztec/epoch-cache';
4
3
  import { BlockTagTooOldError, RollupContract } from '@aztec/ethereum/contracts';
5
4
  import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
@@ -15,32 +14,30 @@ import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/runni
15
14
  import { DateProvider } from '@aztec/foundation/timer';
16
15
  import {
17
16
  type ArchiverEmitter,
18
- type CheckpointId,
19
- GENESIS_CHECKPOINT_HEADER_HASH,
20
17
  L2Block,
21
18
  type L2BlockSink,
22
19
  type L2Tips,
23
20
  type ValidateCheckpointResult,
24
21
  } from '@aztec/stdlib/block';
25
- import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
22
+ import { type ProposedCheckpointInput, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
26
23
  import {
27
24
  type L1RollupConstants,
28
- getEpochNumberAtTimestamp,
25
+ getEpochAtSlot,
29
26
  getSlotAtNextL1Block,
30
- getSlotAtTimestamp,
31
27
  getSlotRangeForEpoch,
32
28
  getTimestampRangeForEpoch,
33
29
  } from '@aztec/stdlib/epoch-helpers';
34
30
  import { type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';
35
31
 
36
32
  import { type ArchiverConfig, mapArchiverConfig } from './config.js';
37
- import { NoBlobBodiesFoundError } from './errors.js';
33
+ import { BlockAlreadyCheckpointedError, NoBlobBodiesFoundError } from './errors.js';
38
34
  import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
39
35
  import { ArchiverDataSourceBase } from './modules/data_source_base.js';
40
36
  import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js';
41
37
  import type { ArchiverInstrumentation } from './modules/instrumentation.js';
42
38
  import type { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
43
39
  import type { KVArchiverDataStore } from './store/kv_archiver_store.js';
40
+ import { L2TipsCache } from './store/l2_tips_cache.js';
44
41
 
45
42
  /** Export ArchiverEmitter for use in factory and tests. */
46
43
  export type { ArchiverEmitter };
@@ -83,6 +80,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
83
80
  /** Helper to handle updates to the store */
84
81
  private readonly updater: ArchiverDataStoreUpdater;
85
82
 
83
+ /** In-memory cache for L2 chain tips. */
84
+ private readonly l2TipsCache: L2TipsCache;
85
+
86
86
  public readonly tracer: Tracer;
87
87
 
88
88
  /**
@@ -91,11 +91,10 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
91
91
  * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
92
92
  * @param rollup - Rollup contract instance.
93
93
  * @param inbox - Inbox contract instance.
94
- * @param l1Addresses - L1 contract addresses (registry, governance proposer, slash factory, slashing proposer).
94
+ * @param l1Addresses - L1 contract addresses (registry, governance proposer, slashing proposer).
95
95
  * @param dataStore - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
96
96
  * @param config - Archiver configuration options.
97
97
  * @param blobClient - Client for retrieving blob data.
98
- * @param epochCache - Cache for epoch-related data.
99
98
  * @param dateProvider - Provider for current date/time.
100
99
  * @param instrumentation - Instrumentation for metrics and tracing.
101
100
  * @param l1Constants - L1 rollup constants.
@@ -105,10 +104,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
105
104
  private readonly publicClient: ViemPublicClient,
106
105
  private readonly debugClient: ViemPublicDebugClient,
107
106
  private readonly rollup: RollupContract,
108
- private readonly l1Addresses: Pick<
109
- L1ContractAddresses,
110
- 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
111
- > & { slashingProposerAddress: EthAddress },
107
+ private readonly l1Addresses: Pick<L1ContractAddresses, 'registryAddress' | 'governanceProposerAddress'> & {
108
+ slashingProposerAddress: EthAddress;
109
+ },
112
110
  readonly dataStore: KVArchiverDataStore,
113
111
  private config: {
114
112
  pollingIntervalMs: number;
@@ -119,9 +117,13 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
119
117
  },
120
118
  private readonly blobClient: BlobClientInterface,
121
119
  instrumentation: ArchiverInstrumentation,
122
- protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
120
+ protected override readonly l1Constants: L1RollupConstants & {
121
+ l1StartBlockHash: Buffer32;
122
+ genesisArchiveRoot: Fr;
123
+ },
123
124
  synchronizer: ArchiverL1Synchronizer,
124
125
  events: ArchiverEmitter,
126
+ l2TipsCache?: L2TipsCache,
125
127
  private readonly log: Logger = createLogger('archiver'),
126
128
  ) {
127
129
  super(dataStore, l1Constants);
@@ -130,7 +132,10 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
130
132
  this.initialSyncPromise = promiseWithResolvers();
131
133
  this.synchronizer = synchronizer;
132
134
  this.events = events;
133
- this.updater = new ArchiverDataStoreUpdater(this.dataStore);
135
+ this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore);
136
+ this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache, {
137
+ rollupManaLimit: l1Constants.rollupManaLimit,
138
+ });
134
139
 
135
140
  // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
136
141
  // are done as fast as possible. This then gets updated once the initial sync completes.
@@ -203,6 +208,10 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
203
208
  });
204
209
  }
205
210
 
211
+ public async setProposedCheckpoint(pending: ProposedCheckpointInput): Promise<void> {
212
+ await this.updater.setProposedCheckpoint(pending);
213
+ }
214
+
206
215
  /**
207
216
  * Processes all queued blocks, adding them to the store.
208
217
  * Called at the beginning of each sync iteration.
@@ -235,10 +244,15 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
235
244
  }
236
245
 
237
246
  try {
238
- await this.updater.addProposedBlocks([block]);
247
+ await this.updater.addProposedBlock(block);
239
248
  this.log.debug(`Added block ${block.number} to store`);
240
249
  resolve();
241
250
  } catch (err: any) {
251
+ if (err instanceof BlockAlreadyCheckpointedError) {
252
+ this.log.debug(`Proposed block ${block.number} matches already checkpointed block, ignoring late proposal`);
253
+ resolve();
254
+ continue;
255
+ }
242
256
  this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
243
257
  reject(err);
244
258
  }
@@ -330,16 +344,49 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
330
344
  return Promise.resolve(this.synchronizer.getL1Timestamp());
331
345
  }
332
346
 
333
- public getL2SlotNumber(): Promise<SlotNumber | undefined> {
347
+ public async getSyncedL2SlotNumber(): Promise<SlotNumber | undefined> {
348
+ // The synced L2 slot is the latest slot for which we have all L1 data,
349
+ // either because we have seen all L1 blocks for that slot, or because
350
+ // we have seen the corresponding checkpoint.
351
+
352
+ let slotFromL1Sync: SlotNumber | undefined;
334
353
  const l1Timestamp = this.synchronizer.getL1Timestamp();
335
- return Promise.resolve(l1Timestamp === undefined ? undefined : getSlotAtTimestamp(l1Timestamp, this.l1Constants));
354
+ if (l1Timestamp !== undefined) {
355
+ const nextL1BlockSlot = getSlotAtNextL1Block(l1Timestamp, this.l1Constants);
356
+ if (Number(nextL1BlockSlot) > 0) {
357
+ slotFromL1Sync = SlotNumber.add(nextL1BlockSlot, -1);
358
+ }
359
+ }
360
+
361
+ let slotFromCheckpoint: SlotNumber | undefined;
362
+ const latestCheckpointNumber = await this.store.getSynchedCheckpointNumber();
363
+ if (latestCheckpointNumber > 0) {
364
+ const checkpointData = await this.store.getCheckpointData(latestCheckpointNumber);
365
+ if (checkpointData) {
366
+ slotFromCheckpoint = checkpointData.header.slotNumber;
367
+ }
368
+ }
369
+
370
+ if (slotFromL1Sync === undefined && slotFromCheckpoint === undefined) {
371
+ return undefined;
372
+ }
373
+ return SlotNumber(Math.max(slotFromL1Sync ?? 0, slotFromCheckpoint ?? 0));
336
374
  }
337
375
 
338
- public getL2EpochNumber(): Promise<EpochNumber | undefined> {
339
- const l1Timestamp = this.synchronizer.getL1Timestamp();
340
- return Promise.resolve(
341
- l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(l1Timestamp, this.l1Constants),
342
- );
376
+ public async getSyncedL2EpochNumber(): Promise<EpochNumber | undefined> {
377
+ const syncedSlot = await this.getSyncedL2SlotNumber();
378
+ if (syncedSlot === undefined) {
379
+ return undefined;
380
+ }
381
+ // An epoch is fully synced when all its slots are synced.
382
+ // We check if syncedSlot is the last slot of its epoch; if so, that epoch is fully synced.
383
+ // Otherwise, only the previous epoch is fully synced.
384
+ const epoch = getEpochAtSlot(syncedSlot, this.l1Constants);
385
+ const [, endSlot] = getSlotRangeForEpoch(epoch, this.l1Constants);
386
+ if (syncedSlot >= endSlot) {
387
+ return epoch;
388
+ }
389
+ return Number(epoch) > 0 ? EpochNumber(Number(epoch) - 1) : undefined;
343
390
  }
344
391
 
345
392
  public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
@@ -391,115 +438,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
391
438
  return true;
392
439
  }
393
440
 
394
- public async getL2Tips(): Promise<L2Tips> {
395
- const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([
396
- this.getBlockNumber(),
397
- this.getProvenBlockNumber(),
398
- this.getCheckpointedL2BlockNumber(),
399
- this.getFinalizedL2BlockNumber(),
400
- ] as const);
401
-
402
- const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
403
-
404
- // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
405
- const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
406
- await Promise.all([
407
- latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
408
- provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
409
- finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
410
- checkpointedBlockNumber > beforeInitialblockNumber
411
- ? this.getCheckpointedBlock(checkpointedBlockNumber)
412
- : undefined,
413
- ] as const);
414
-
415
- if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
416
- throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
417
- }
418
-
419
- // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
420
- if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
421
- throw new Error(
422
- `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
423
- );
424
- }
425
-
426
- if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
427
- throw new Error(
428
- `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
429
- );
430
- }
431
-
432
- if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
433
- throw new Error(
434
- `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
435
- );
436
- }
437
-
438
- const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
439
- const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
440
- const finalizedBlockHeaderHash =
441
- (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
442
- const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
443
-
444
- // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
445
- const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
446
- provenCheckpointedBlock !== undefined
447
- ? await this.getCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
448
- : [undefined],
449
- finalizedCheckpointedBlock !== undefined
450
- ? await this.getCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
451
- : [undefined],
452
- checkpointedBlock !== undefined ? await this.getCheckpoints(checkpointedBlock?.checkpointNumber, 1) : [undefined],
453
- ]);
454
-
455
- const initialcheckpointId: CheckpointId = {
456
- number: CheckpointNumber.ZERO,
457
- hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
458
- };
459
-
460
- const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
461
- if (checkpoint === undefined) {
462
- return initialcheckpointId;
463
- }
464
- return {
465
- number: checkpoint.checkpoint.number,
466
- hash: checkpoint.checkpoint.hash().toString(),
467
- };
468
- };
469
-
470
- const l2Tips: L2Tips = {
471
- proposed: {
472
- number: latestBlockNumber,
473
- hash: latestBlockHeaderHash.toString(),
474
- },
475
- proven: {
476
- block: {
477
- number: provenBlockNumber,
478
- hash: provenBlockHeaderHash.toString(),
479
- },
480
- checkpoint: makeCheckpointId(provenBlockCheckpoint),
481
- },
482
- finalized: {
483
- block: {
484
- number: finalizedBlockNumber,
485
- hash: finalizedBlockHeaderHash.toString(),
486
- },
487
- checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
488
- },
489
- checkpointed: {
490
- block: {
491
- number: checkpointedBlockNumber,
492
- hash: checkpointedBlockHeaderHash.toString(),
493
- },
494
- checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
495
- },
496
- };
497
-
498
- return l2Tips;
441
+ public getL2Tips(): Promise<L2Tips> {
442
+ return this.l2TipsCache.getL2Tips();
499
443
  }
500
444
 
501
445
  public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
502
- // TODO(pw/mbps): This still assumes 1 block per checkpoint
503
446
  const currentBlocks = await this.getL2Tips();
504
447
  const currentL2Block = currentBlocks.proposed.number;
505
448
  const currentProvenBlock = currentBlocks.proven.block.number;
@@ -511,8 +454,25 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
511
454
  if (!targetL2Block) {
512
455
  throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
513
456
  }
514
- const targetL1BlockNumber = targetL2Block.l1.blockNumber;
515
457
  const targetCheckpointNumber = targetL2Block.checkpointNumber;
458
+
459
+ // Rollback operates at checkpoint granularity: the target block must be the last block of its checkpoint.
460
+ const checkpointData = await this.store.getCheckpointData(targetCheckpointNumber);
461
+ if (checkpointData) {
462
+ const lastBlockInCheckpoint = BlockNumber(checkpointData.startBlock + checkpointData.blockCount - 1);
463
+ if (targetL2BlockNumber !== lastBlockInCheckpoint) {
464
+ const previousCheckpointBoundary =
465
+ checkpointData.startBlock > 1 ? BlockNumber(checkpointData.startBlock - 1) : BlockNumber(0);
466
+ throw new Error(
467
+ `Target L2 block ${targetL2BlockNumber} is not at a checkpoint boundary. ` +
468
+ `Checkpoint ${targetCheckpointNumber} spans blocks ${checkpointData.startBlock} to ${lastBlockInCheckpoint}. ` +
469
+ `Use block ${lastBlockInCheckpoint} to roll back to this checkpoint, ` +
470
+ `or block ${previousCheckpointBoundary} to roll back to the previous one.`,
471
+ );
472
+ }
473
+ }
474
+
475
+ const targetL1BlockNumber = targetL2Block.l1.blockNumber;
516
476
  const targetL1Block = await this.publicClient.getBlock({
517
477
  blockNumber: targetL1BlockNumber,
518
478
  includeTransactions: false,
@@ -529,15 +489,18 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
529
489
  await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
530
490
  this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
531
491
  await this.store.setCheckpointSynchedL1BlockNumber(targetL1BlockNumber);
532
- await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
492
+ await this.store.setMessageSyncState(
493
+ { l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash },
494
+ undefined,
495
+ );
533
496
  if (targetL2BlockNumber < currentProvenBlock) {
534
- this.log.info(`Clearing proven L2 block number`);
535
- await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
497
+ this.log.info(`Rolling back proven L2 checkpoint to ${targetCheckpointNumber}`);
498
+ await this.updater.setProvenCheckpointNumber(targetCheckpointNumber);
499
+ }
500
+ const currentFinalizedBlock = currentBlocks.finalized.block.number;
501
+ if (targetL2BlockNumber < currentFinalizedBlock) {
502
+ this.log.info(`Rolling back finalized L2 checkpoint to ${targetCheckpointNumber}`);
503
+ await this.updater.setFinalizedCheckpointNumber(targetCheckpointNumber);
536
504
  }
537
- // TODO(palla/reorg): Set the finalized block when we add support for it.
538
- // if (targetL2BlockNumber < currentFinalizedBlock) {
539
- // this.log.info(`Clearing finalized L2 block number`);
540
- // await this.store.setFinalizedL2BlockNumber(0);
541
- // }
542
505
  }
543
506
  }
package/src/config.ts CHANGED
@@ -8,7 +8,12 @@ import {
8
8
  getConfigFromMappings,
9
9
  numberConfigHelper,
10
10
  } from '@aztec/foundation/config';
11
- import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config';
11
+ import {
12
+ type ChainConfig,
13
+ type PipelineConfig,
14
+ chainConfigMappings,
15
+ pipelineConfigMappings,
16
+ } from '@aztec/stdlib/config';
12
17
  import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server';
13
18
 
14
19
  /**
@@ -21,11 +26,13 @@ import type { ArchiverSpecificConfig } from '@aztec/stdlib/interfaces/server';
21
26
  export type ArchiverConfig = ArchiverSpecificConfig &
22
27
  L1ReaderConfig &
23
28
  L1ContractsConfig &
29
+ PipelineConfig & // required to pass through to epoch cache
24
30
  BlobClientConfig &
25
31
  ChainConfig;
26
32
 
27
33
  export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
28
34
  ...blobClientConfigMapping,
35
+ ...pipelineConfigMappings,
29
36
  archiverPollingIntervalMS: {
30
37
  env: 'ARCHIVER_POLLING_INTERVAL_MS',
31
38
  description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.',
package/src/errors.ts CHANGED
@@ -6,24 +6,9 @@ export class NoBlobBodiesFoundError extends Error {
6
6
  }
7
7
  }
8
8
 
9
- export class InitialBlockNumberNotSequentialError extends Error {
10
- constructor(
11
- public readonly newBlockNumber: number,
12
- public readonly previousBlockNumber: number | undefined,
13
- ) {
14
- super(
15
- `Cannot insert new block ${newBlockNumber} given previous block number in store is ${
16
- previousBlockNumber ?? 'undefined'
17
- }`,
18
- );
19
- }
20
- }
21
-
22
9
  export class BlockNumberNotSequentialError extends Error {
23
10
  constructor(newBlockNumber: number, previous: number | undefined) {
24
- super(
25
- `Cannot insert new block ${newBlockNumber} given previous block number in batch is ${previous ?? 'undefined'}`,
26
- );
11
+ super(`Cannot insert new block ${newBlockNumber} given previous block number is ${previous ?? 'undefined'}`);
27
12
  }
28
13
  }
29
14
 
@@ -41,17 +26,13 @@ export class InitialCheckpointNumberNotSequentialError extends Error {
41
26
  }
42
27
 
43
28
  export class CheckpointNumberNotSequentialError extends Error {
44
- constructor(newCheckpointNumber: number, previous: number | undefined) {
45
- super(
46
- `Cannot insert new checkpoint ${newCheckpointNumber} given previous checkpoint number in batch is ${previous ?? 'undefined'}`,
47
- );
48
- }
49
- }
50
-
51
- export class CheckpointNumberNotConsistentError extends Error {
52
- constructor(newCheckpointNumber: number, previous: number | undefined) {
29
+ constructor(
30
+ newCheckpointNumber: number,
31
+ previous: number | undefined,
32
+ source: 'confirmed' | 'proposed' = 'confirmed',
33
+ ) {
53
34
  super(
54
- `Cannot insert block for new checkpoint ${newCheckpointNumber} given previous block was checkpoint ${previous ?? 'undefined'}`,
35
+ `Cannot insert new checkpoint ${newCheckpointNumber} given previous ${source} checkpoint number is ${previous ?? 'undefined'}`,
55
36
  );
56
37
  }
57
38
  }
@@ -89,6 +70,69 @@ export class BlockNotFoundError extends Error {
89
70
  }
90
71
  }
91
72
 
73
+ /** Thrown when a proposed block matches a block that was already checkpointed. This is expected for late proposals. */
74
+ export class BlockAlreadyCheckpointedError extends Error {
75
+ constructor(public readonly blockNumber: number) {
76
+ super(`Block ${blockNumber} has already been checkpointed with the same content`);
77
+ this.name = 'BlockAlreadyCheckpointedError';
78
+ }
79
+ }
80
+
81
+ /** Thrown when logs are added for a tag whose last stored log has a higher block number than the new log. */
82
+ export class OutOfOrderLogInsertionError extends Error {
83
+ constructor(
84
+ public readonly logType: 'private' | 'public',
85
+ public readonly tag: string,
86
+ public readonly lastBlockNumber: number,
87
+ public readonly newBlockNumber: number,
88
+ ) {
89
+ super(
90
+ `Out-of-order ${logType} log insertion for tag ${tag}: ` +
91
+ `last existing log is from block ${lastBlockNumber} but new log is from block ${newBlockNumber}`,
92
+ );
93
+ this.name = 'OutOfOrderLogInsertionError';
94
+ }
95
+ }
96
+
97
+ /** Thrown when L1 to L2 messages are requested for a checkpoint whose message tree hasn't been sealed yet. */
98
+ export class L1ToL2MessagesNotReadyError extends Error {
99
+ constructor(
100
+ public readonly checkpointNumber: number,
101
+ public readonly inboxTreeInProgress: bigint,
102
+ ) {
103
+ super(
104
+ `Cannot get L1 to L2 messages for checkpoint ${checkpointNumber}: ` +
105
+ `inbox tree in progress is ${inboxTreeInProgress}, messages not yet sealed`,
106
+ );
107
+ this.name = 'L1ToL2MessagesNotReadyError';
108
+ }
109
+ }
110
+
111
+ /** Thrown when a proposed checkpoint number is stale (already processed). */
112
+ export class ProposedCheckpointStaleError extends Error {
113
+ constructor(
114
+ public readonly proposedCheckpointNumber: number,
115
+ public readonly currentProposedNumber: number,
116
+ ) {
117
+ super(`Stale proposed checkpoint ${proposedCheckpointNumber}: current proposed is ${currentProposedNumber}`);
118
+ this.name = 'ProposedCheckpointStaleError';
119
+ }
120
+ }
121
+
122
+ /** Thrown when a proposed checkpoint number is not the expected confirmed + 1. */
123
+ export class ProposedCheckpointNotSequentialError extends Error {
124
+ constructor(
125
+ public readonly proposedCheckpointNumber: number,
126
+ public readonly confirmedCheckpointNumber: number,
127
+ ) {
128
+ super(
129
+ `Proposed checkpoint ${proposedCheckpointNumber} is not sequential: expected ${confirmedCheckpointNumber + 1} (confirmed + 1)`,
130
+ );
131
+ this.name = 'ProposedCheckpointNotSequentialError';
132
+ }
133
+ }
134
+
135
+ /** Thrown when a proposed block conflicts with an already checkpointed block (different content). */
92
136
  export class CannotOverwriteCheckpointedBlockError extends Error {
93
137
  constructor(
94
138
  public readonly blockNumber: number,
package/src/factory.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  import { EpochCache } from '@aztec/epoch-cache';
2
2
  import { createEthereumChain } from '@aztec/ethereum/chain';
3
+ import { makeL1HttpTransport } from '@aztec/ethereum/client';
3
4
  import { InboxContract, RollupContract } from '@aztec/ethereum/contracts';
4
5
  import type { ViemPublicDebugClient } from '@aztec/ethereum/types';
5
6
  import { BlockNumber } from '@aztec/foundation/branded-types';
@@ -7,38 +8,37 @@ import { Buffer32 } from '@aztec/foundation/buffer';
7
8
  import { merge } from '@aztec/foundation/collection';
8
9
  import { Fr } from '@aztec/foundation/curves/bn254';
9
10
  import { DateProvider } from '@aztec/foundation/timer';
10
- import type { DataStoreConfig } from '@aztec/kv-store/config';
11
11
  import { createStore } from '@aztec/kv-store/lmdb-v2';
12
12
  import { protocolContractNames } from '@aztec/protocol-contracts';
13
13
  import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle';
14
14
  import { FunctionType, decodeFunctionSignature } from '@aztec/stdlib/abi';
15
15
  import type { ArchiverEmitter } from '@aztec/stdlib/block';
16
- import { type ContractClassPublic, computePublicBytecodeCommitment } from '@aztec/stdlib/contract';
17
- import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers';
16
+ import { type ContractClassPublicWithCommitment, computePublicBytecodeCommitment } from '@aztec/stdlib/contract';
17
+ import type { DataStoreConfig } from '@aztec/stdlib/kv-store';
18
18
  import { getTelemetryClient } from '@aztec/telemetry-client';
19
19
 
20
20
  import { EventEmitter } from 'events';
21
- import { createPublicClient, fallback, http } from 'viem';
21
+ import { createPublicClient } from 'viem';
22
22
 
23
23
  import { Archiver, type ArchiverDeps } from './archiver.js';
24
24
  import { type ArchiverConfig, mapArchiverConfig } from './config.js';
25
25
  import { ArchiverInstrumentation } from './modules/instrumentation.js';
26
26
  import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
27
27
  import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './store/kv_archiver_store.js';
28
+ import { L2TipsCache } from './store/l2_tips_cache.js';
28
29
 
29
30
/** Store name passed to `createStore` when opening the archiver's KV database. */
export const ARCHIVER_STORE_NAME = 'archiver';
30
31
 
31
32
  /** Creates an archiver store. */
32
33
  export async function createArchiverStore(
33
34
  userConfig: Pick<ArchiverConfig, 'archiverStoreMapSizeKb' | 'maxLogs'> & DataStoreConfig,
34
- l1Constants: Pick<L1RollupConstants, 'epochDuration'>,
35
35
  ) {
36
36
  const config = {
37
37
  ...userConfig,
38
38
  dataStoreMapSizeKb: userConfig.archiverStoreMapSizeKb ?? userConfig.dataStoreMapSizeKb,
39
39
  };
40
40
  const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config);
41
- return new KVArchiverDataStore(store, config.maxLogs, l1Constants);
41
+ return new KVArchiverDataStore(store, config.maxLogs);
42
42
  }
43
43
 
44
44
  /**
@@ -53,14 +53,15 @@ export async function createArchiver(
53
53
  deps: ArchiverDeps,
54
54
  opts: { blockUntilSync: boolean } = { blockUntilSync: true },
55
55
  ): Promise<Archiver> {
56
- const archiverStore = await createArchiverStore(config, { epochDuration: config.aztecEpochDuration });
56
+ const archiverStore = await createArchiverStore(config);
57
57
  await registerProtocolContracts(archiverStore);
58
58
 
59
59
  // Create Ethereum clients
60
60
  const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
61
+ const httpTimeout = config.l1HttpTimeoutMS;
61
62
  const publicClient = createPublicClient({
62
63
  chain: chain.chainInfo,
63
- transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))),
64
+ transport: makeL1HttpTransport(config.l1RpcUrls, { timeout: httpTimeout }),
64
65
  pollingInterval: config.viemPollingIntervalMS,
65
66
  });
66
67
 
@@ -68,7 +69,7 @@ export async function createArchiver(
68
69
  const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
69
70
  const debugClient = createPublicClient({
70
71
  chain: chain.chainInfo,
71
- transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))),
72
+ transport: makeL1HttpTransport(debugRpcUrls, { timeout: httpTimeout }),
72
73
  pollingInterval: config.viemPollingIntervalMS,
73
74
  }) as ViemPublicDebugClient;
74
75
 
@@ -84,6 +85,7 @@ export async function createArchiver(
84
85
  genesisArchiveRoot,
85
86
  slashingProposerAddress,
86
87
  targetCommitteeSize,
88
+ rollupManaLimit,
87
89
  ] = await Promise.all([
88
90
  rollup.getL1StartBlock(),
89
91
  rollup.getL1GenesisTime(),
@@ -91,6 +93,7 @@ export async function createArchiver(
91
93
  rollup.getGenesisArchiveTreeRoot(),
92
94
  rollup.getSlashingProposerAddress(),
93
95
  rollup.getTargetCommitteeSize(),
96
+ rollup.getManaLimit(),
94
97
  ] as const);
95
98
 
96
99
  const l1StartBlockHash = await publicClient
@@ -109,6 +112,7 @@ export async function createArchiver(
109
112
  proofSubmissionEpochs: Number(proofSubmissionEpochs),
110
113
  targetCommitteeSize,
111
114
  genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
115
+ rollupManaLimit: Number(rollupManaLimit),
112
116
  };
113
117
 
114
118
  const archiverConfig = merge(
@@ -128,13 +132,15 @@ export async function createArchiver(
128
132
  // Create the event emitter that will be shared by archiver and synchronizer
129
133
  const events = new EventEmitter() as ArchiverEmitter;
130
134
 
135
+ // Create L2 tips cache shared by archiver and synchronizer
136
+ const l2TipsCache = new L2TipsCache(archiverStore.blockStore);
137
+
131
138
  // Create the L1 synchronizer
132
139
  const synchronizer = new ArchiverL1Synchronizer(
133
140
  publicClient,
134
141
  debugClient,
135
142
  rollup,
136
143
  inbox,
137
- { ...config.l1Contracts, slashingProposerAddress },
138
144
  archiverStore,
139
145
  archiverConfig,
140
146
  deps.blobClient,
@@ -144,6 +150,8 @@ export async function createArchiver(
144
150
  l1Constants,
145
151
  events,
146
152
  instrumentation.tracer,
153
+ l2TipsCache,
154
+ undefined, // log (use default)
147
155
  );
148
156
 
149
157
  const archiver = new Archiver(
@@ -158,22 +166,29 @@ export async function createArchiver(
158
166
  l1Constants,
159
167
  synchronizer,
160
168
  events,
169
+ l2TipsCache,
161
170
  );
162
171
 
163
172
  await archiver.start(opts.blockUntilSync);
164
173
  return archiver;
165
174
  }
166
175
 
167
- /** Registers protocol contracts in the archiver store. */
176
+ /** Registers protocol contracts in the archiver store. Idempotent — skips contracts that already exist (e.g. on node restart). */
168
177
  export async function registerProtocolContracts(store: KVArchiverDataStore) {
169
178
  const blockNumber = 0;
170
179
  for (const name of protocolContractNames) {
171
180
  const provider = new BundledProtocolContractsProvider();
172
181
  const contract = await provider.getProtocolContractArtifact(name);
173
- const contractClassPublic: ContractClassPublic = {
182
+
183
+ // Skip if already registered (happens on node restart with a persisted store).
184
+ if (await store.getContractClass(contract.contractClass.id)) {
185
+ continue;
186
+ }
187
+
188
+ const publicBytecodeCommitment = await computePublicBytecodeCommitment(contract.contractClass.packedBytecode);
189
+ const contractClassPublic: ContractClassPublicWithCommitment = {
174
190
  ...contract.contractClass,
175
- privateFunctions: [],
176
- utilityFunctions: [],
191
+ publicBytecodeCommitment,
177
192
  };
178
193
 
179
194
  const publicFunctionSignatures = contract.artifact.functions
@@ -181,8 +196,7 @@ export async function registerProtocolContracts(store: KVArchiverDataStore) {
181
196
  .map(fn => decodeFunctionSignature(fn.name, fn.parameters));
182
197
 
183
198
  await store.registerContractFunctionSignatures(publicFunctionSignatures);
184
- const bytecodeCommitment = await computePublicBytecodeCommitment(contractClassPublic.packedBytecode);
185
- await store.addContractClasses([contractClassPublic], [bytecodeCommitment], BlockNumber(blockNumber));
199
+ await store.addContractClasses([contractClassPublic], BlockNumber(blockNumber));
186
200
  await store.addContractInstances([contract.instance], BlockNumber(blockNumber));
187
201
  }
188
202
  }