@aztec/archiver 0.0.1-commit.2ed92850 → 0.0.1-commit.358457c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/dest/archiver.d.ts +7 -3
  2. package/dest/archiver.d.ts.map +1 -1
  3. package/dest/archiver.js +24 -93
  4. package/dest/factory.d.ts +3 -1
  5. package/dest/factory.d.ts.map +1 -1
  6. package/dest/factory.js +11 -10
  7. package/dest/index.d.ts +2 -1
  8. package/dest/index.d.ts.map +1 -1
  9. package/dest/index.js +1 -0
  10. package/dest/l1/bin/retrieve-calldata.js +36 -33
  11. package/dest/l1/calldata_retriever.d.ts +73 -50
  12. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  13. package/dest/l1/calldata_retriever.js +190 -259
  14. package/dest/l1/data_retrieval.d.ts +4 -7
  15. package/dest/l1/data_retrieval.d.ts.map +1 -1
  16. package/dest/l1/data_retrieval.js +10 -14
  17. package/dest/l1/spire_proposer.d.ts +5 -5
  18. package/dest/l1/spire_proposer.d.ts.map +1 -1
  19. package/dest/l1/spire_proposer.js +9 -17
  20. package/dest/l1/validate_trace.d.ts +6 -3
  21. package/dest/l1/validate_trace.d.ts.map +1 -1
  22. package/dest/l1/validate_trace.js +13 -9
  23. package/dest/modules/data_source_base.d.ts +11 -6
  24. package/dest/modules/data_source_base.d.ts.map +1 -1
  25. package/dest/modules/data_source_base.js +28 -72
  26. package/dest/modules/data_store_updater.d.ts +9 -2
  27. package/dest/modules/data_store_updater.d.ts.map +1 -1
  28. package/dest/modules/data_store_updater.js +40 -19
  29. package/dest/modules/instrumentation.d.ts +15 -2
  30. package/dest/modules/instrumentation.d.ts.map +1 -1
  31. package/dest/modules/instrumentation.js +36 -12
  32. package/dest/modules/l1_synchronizer.d.ts +4 -8
  33. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  34. package/dest/modules/l1_synchronizer.js +16 -12
  35. package/dest/store/block_store.d.ts +21 -17
  36. package/dest/store/block_store.d.ts.map +1 -1
  37. package/dest/store/block_store.js +71 -19
  38. package/dest/store/contract_class_store.d.ts +1 -1
  39. package/dest/store/contract_class_store.d.ts.map +1 -1
  40. package/dest/store/contract_class_store.js +11 -7
  41. package/dest/store/kv_archiver_store.d.ts +21 -7
  42. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  43. package/dest/store/kv_archiver_store.js +20 -3
  44. package/dest/store/l2_tips_cache.d.ts +19 -0
  45. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  46. package/dest/store/l2_tips_cache.js +89 -0
  47. package/dest/store/log_store.d.ts +1 -1
  48. package/dest/store/log_store.d.ts.map +1 -1
  49. package/dest/store/log_store.js +57 -37
  50. package/dest/test/fake_l1_state.d.ts +6 -1
  51. package/dest/test/fake_l1_state.d.ts.map +1 -1
  52. package/dest/test/fake_l1_state.js +56 -18
  53. package/dest/test/index.js +3 -1
  54. package/dest/test/mock_archiver.d.ts +1 -1
  55. package/dest/test/mock_archiver.d.ts.map +1 -1
  56. package/dest/test/mock_archiver.js +3 -2
  57. package/dest/test/mock_l2_block_source.d.ts +22 -7
  58. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  59. package/dest/test/mock_l2_block_source.js +127 -84
  60. package/dest/test/mock_structs.d.ts +3 -2
  61. package/dest/test/mock_structs.d.ts.map +1 -1
  62. package/dest/test/mock_structs.js +7 -5
  63. package/dest/test/noop_l1_archiver.d.ts +23 -0
  64. package/dest/test/noop_l1_archiver.d.ts.map +1 -0
  65. package/dest/test/noop_l1_archiver.js +68 -0
  66. package/package.json +14 -13
  67. package/src/archiver.ts +32 -112
  68. package/src/factory.ts +26 -12
  69. package/src/index.ts +1 -0
  70. package/src/l1/README.md +25 -68
  71. package/src/l1/bin/retrieve-calldata.ts +46 -39
  72. package/src/l1/calldata_retriever.ts +249 -379
  73. package/src/l1/data_retrieval.ts +7 -17
  74. package/src/l1/spire_proposer.ts +7 -15
  75. package/src/l1/validate_trace.ts +24 -6
  76. package/src/modules/data_source_base.ts +56 -95
  77. package/src/modules/data_store_updater.ts +43 -18
  78. package/src/modules/instrumentation.ts +44 -12
  79. package/src/modules/l1_synchronizer.ts +17 -15
  80. package/src/store/block_store.ts +90 -41
  81. package/src/store/contract_class_store.ts +11 -7
  82. package/src/store/kv_archiver_store.ts +40 -8
  83. package/src/store/l2_tips_cache.ts +89 -0
  84. package/src/store/log_store.ts +98 -36
  85. package/src/test/fake_l1_state.ts +75 -17
  86. package/src/test/index.ts +3 -0
  87. package/src/test/mock_archiver.ts +3 -2
  88. package/src/test/mock_l2_block_source.ts +164 -84
  89. package/src/test/mock_structs.ts +22 -6
  90. package/src/test/noop_l1_archiver.ts +109 -0
package/src/archiver.ts CHANGED
@@ -1,5 +1,4 @@
1
1
  import type { BlobClientInterface } from '@aztec/blob-client/client';
2
- import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
3
2
  import { EpochCache } from '@aztec/epoch-cache';
4
3
  import { BlockTagTooOldError, RollupContract } from '@aztec/ethereum/contracts';
5
4
  import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
@@ -15,8 +14,6 @@ import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/runni
15
14
  import { DateProvider } from '@aztec/foundation/timer';
16
15
  import {
17
16
  type ArchiverEmitter,
18
- type CheckpointId,
19
- GENESIS_CHECKPOINT_HEADER_HASH,
20
17
  L2Block,
21
18
  type L2BlockSink,
22
19
  type L2Tips,
@@ -26,6 +23,7 @@ import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
26
23
  import {
27
24
  type L1RollupConstants,
28
25
  getEpochNumberAtTimestamp,
26
+ getSlotAtNextL1Block,
29
27
  getSlotAtTimestamp,
30
28
  getSlotRangeForEpoch,
31
29
  getTimestampRangeForEpoch,
@@ -40,6 +38,7 @@ import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js';
40
38
  import type { ArchiverInstrumentation } from './modules/instrumentation.js';
41
39
  import type { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
42
40
  import type { KVArchiverDataStore } from './store/kv_archiver_store.js';
41
+ import { L2TipsCache } from './store/l2_tips_cache.js';
43
42
 
44
43
  /** Export ArchiverEmitter for use in factory and tests. */
45
44
  export type { ArchiverEmitter };
@@ -68,7 +67,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
68
67
  public readonly events: ArchiverEmitter;
69
68
 
70
69
  /** A loop in which we will be continually fetching new checkpoints. */
71
- private runningPromise: RunningPromise;
70
+ protected runningPromise: RunningPromise;
72
71
 
73
72
  /** L1 synchronizer that handles fetching checkpoints and messages from L1. */
74
73
  private readonly synchronizer: ArchiverL1Synchronizer;
@@ -82,6 +81,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
82
81
  /** Helper to handle updates to the store */
83
82
  private readonly updater: ArchiverDataStoreUpdater;
84
83
 
84
+ /** In-memory cache for L2 chain tips. */
85
+ private readonly l2TipsCache: L2TipsCache;
86
+
85
87
  public readonly tracer: Tracer;
86
88
 
87
89
  /**
@@ -121,6 +123,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
121
123
  protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
122
124
  synchronizer: ArchiverL1Synchronizer,
123
125
  events: ArchiverEmitter,
126
+ l2TipsCache?: L2TipsCache,
124
127
  private readonly log: Logger = createLogger('archiver'),
125
128
  ) {
126
129
  super(dataStore, l1Constants);
@@ -129,7 +132,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
129
132
  this.initialSyncPromise = promiseWithResolvers();
130
133
  this.synchronizer = synchronizer;
131
134
  this.events = events;
132
- this.updater = new ArchiverDataStoreUpdater(this.dataStore);
135
+ this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore);
136
+ this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache);
133
137
 
134
138
  // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
135
139
  // are done as fast as possible. This then gets updated once the initial sync completes.
@@ -158,7 +162,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
158
162
 
159
163
  await this.blobClient.testSources();
160
164
  await this.synchronizer.testEthereumNodeSynced();
161
- await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
165
+ await validateAndLogTraceAvailability(
166
+ this.debugClient,
167
+ this.config.ethereumAllowNoDebugHosts ?? false,
168
+ this.log.getBindings(),
169
+ );
162
170
 
163
171
  // Log initial state for the archiver
164
172
  const { l1StartBlock } = this.l1Constants;
@@ -212,8 +220,23 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
212
220
  const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
213
221
  this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
214
222
 
223
+ // Calculate slot threshold for validation
224
+ const l1Timestamp = this.synchronizer.getL1Timestamp();
225
+ const slotAtNextL1Block =
226
+ l1Timestamp === undefined ? undefined : getSlotAtNextL1Block(l1Timestamp, this.l1Constants);
227
+
215
228
  // Process each block individually to properly resolve/reject each promise
216
229
  for (const { block, resolve, reject } of queuedItems) {
230
+ const blockSlot = block.header.globalVariables.slotNumber;
231
+ if (slotAtNextL1Block !== undefined && blockSlot < slotAtNextL1Block) {
232
+ this.log.warn(
233
+ `Rejecting proposed block ${block.number} for past slot ${blockSlot} (current is ${slotAtNextL1Block})`,
234
+ { block: block.toBlockInfo(), l1Timestamp, slotAtNextL1Block },
235
+ );
236
+ reject(new Error(`Block ${block.number} is for past slot ${blockSlot} (current is ${slotAtNextL1Block})`));
237
+ continue;
238
+ }
239
+
217
240
  try {
218
241
  await this.updater.addProposedBlocks([block]);
219
242
  this.log.debug(`Added block ${block.number} to store`);
@@ -371,111 +394,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
371
394
  return true;
372
395
  }
373
396
 
374
- public async getL2Tips(): Promise<L2Tips> {
375
- const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([
376
- this.getBlockNumber(),
377
- this.getProvenBlockNumber(),
378
- this.getCheckpointedL2BlockNumber(),
379
- this.getFinalizedL2BlockNumber(),
380
- ] as const);
381
-
382
- const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
383
-
384
- // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
385
- const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
386
- await Promise.all([
387
- latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
388
- provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
389
- finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
390
- checkpointedBlockNumber > beforeInitialblockNumber
391
- ? this.getCheckpointedBlock(checkpointedBlockNumber)
392
- : undefined,
393
- ] as const);
394
-
395
- if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
396
- throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
397
- }
398
-
399
- // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
400
- if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
401
- throw new Error(
402
- `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
403
- );
404
- }
405
-
406
- if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
407
- throw new Error(
408
- `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
409
- );
410
- }
411
-
412
- if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
413
- throw new Error(
414
- `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
415
- );
416
- }
417
-
418
- const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
419
- const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
420
- const finalizedBlockHeaderHash =
421
- (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
422
- const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
423
-
424
- // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
425
- const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
426
- provenCheckpointedBlock !== undefined
427
- ? await this.getCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
428
- : [undefined],
429
- finalizedCheckpointedBlock !== undefined
430
- ? await this.getCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
431
- : [undefined],
432
- checkpointedBlock !== undefined ? await this.getCheckpoints(checkpointedBlock?.checkpointNumber, 1) : [undefined],
433
- ]);
434
-
435
- const initialcheckpointId: CheckpointId = {
436
- number: CheckpointNumber.ZERO,
437
- hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
438
- };
439
-
440
- const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
441
- if (checkpoint === undefined) {
442
- return initialcheckpointId;
443
- }
444
- return {
445
- number: checkpoint.checkpoint.number,
446
- hash: checkpoint.checkpoint.hash().toString(),
447
- };
448
- };
449
-
450
- const l2Tips: L2Tips = {
451
- proposed: {
452
- number: latestBlockNumber,
453
- hash: latestBlockHeaderHash.toString(),
454
- },
455
- proven: {
456
- block: {
457
- number: provenBlockNumber,
458
- hash: provenBlockHeaderHash.toString(),
459
- },
460
- checkpoint: makeCheckpointId(provenBlockCheckpoint),
461
- },
462
- finalized: {
463
- block: {
464
- number: finalizedBlockNumber,
465
- hash: finalizedBlockHeaderHash.toString(),
466
- },
467
- checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
468
- },
469
- checkpointed: {
470
- block: {
471
- number: checkpointedBlockNumber,
472
- hash: checkpointedBlockHeaderHash.toString(),
473
- },
474
- checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
475
- },
476
- };
477
-
478
- return l2Tips;
397
+ public getL2Tips(): Promise<L2Tips> {
398
+ return this.l2TipsCache.getL2Tips();
479
399
  }
480
400
 
481
401
  public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
@@ -512,7 +432,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
512
432
  await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
513
433
  if (targetL2BlockNumber < currentProvenBlock) {
514
434
  this.log.info(`Clearing proven L2 block number`);
515
- await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
435
+ await this.updater.setProvenCheckpointNumber(CheckpointNumber.ZERO);
516
436
  }
517
437
  // TODO(palla/reorg): Set the finalized block when we add support for it.
518
438
  // if (targetL2BlockNumber < currentFinalizedBlock) {
package/src/factory.ts CHANGED
@@ -6,7 +6,6 @@ import { BlockNumber } from '@aztec/foundation/branded-types';
6
6
  import { Buffer32 } from '@aztec/foundation/buffer';
7
7
  import { merge } from '@aztec/foundation/collection';
8
8
  import { Fr } from '@aztec/foundation/curves/bn254';
9
- import { createLogger } from '@aztec/foundation/log';
10
9
  import { DateProvider } from '@aztec/foundation/timer';
11
10
  import type { DataStoreConfig } from '@aztec/kv-store/config';
12
11
  import { createStore } from '@aztec/kv-store/lmdb-v2';
@@ -26,6 +25,7 @@ import { type ArchiverConfig, mapArchiverConfig } from './config.js';
26
25
  import { ArchiverInstrumentation } from './modules/instrumentation.js';
27
26
  import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
28
27
  import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './store/kv_archiver_store.js';
28
+ import { L2TipsCache } from './store/l2_tips_cache.js';
29
29
 
30
30
  export const ARCHIVER_STORE_NAME = 'archiver';
31
31
 
@@ -38,7 +38,7 @@ export async function createArchiverStore(
38
38
  ...userConfig,
39
39
  dataStoreMapSizeKb: userConfig.archiverStoreMapSizeKb ?? userConfig.dataStoreMapSizeKb,
40
40
  };
41
- const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config, createLogger('archiver:lmdb'));
41
+ const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config);
42
42
  return new KVArchiverDataStore(store, config.maxLogs, l1Constants);
43
43
  }
44
44
 
@@ -78,14 +78,21 @@ export async function createArchiver(
78
78
  const inbox = new InboxContract(publicClient, config.l1Contracts.inboxAddress);
79
79
 
80
80
  // Fetch L1 constants from rollup contract
81
- const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
82
- await Promise.all([
83
- rollup.getL1StartBlock(),
84
- rollup.getL1GenesisTime(),
85
- rollup.getProofSubmissionEpochs(),
86
- rollup.getGenesisArchiveTreeRoot(),
87
- rollup.getSlashingProposerAddress(),
88
- ] as const);
81
+ const [
82
+ l1StartBlock,
83
+ l1GenesisTime,
84
+ proofSubmissionEpochs,
85
+ genesisArchiveRoot,
86
+ slashingProposerAddress,
87
+ targetCommitteeSize,
88
+ ] = await Promise.all([
89
+ rollup.getL1StartBlock(),
90
+ rollup.getL1GenesisTime(),
91
+ rollup.getProofSubmissionEpochs(),
92
+ rollup.getGenesisArchiveTreeRoot(),
93
+ rollup.getSlashingProposerAddress(),
94
+ rollup.getTargetCommitteeSize(),
95
+ ] as const);
89
96
 
90
97
  const l1StartBlockHash = await publicClient
91
98
  .getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
@@ -101,6 +108,7 @@ export async function createArchiver(
101
108
  slotDuration,
102
109
  ethereumSlotDuration,
103
110
  proofSubmissionEpochs: Number(proofSubmissionEpochs),
111
+ targetCommitteeSize,
104
112
  genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
105
113
  };
106
114
 
@@ -121,13 +129,15 @@ export async function createArchiver(
121
129
  // Create the event emitter that will be shared by archiver and synchronizer
122
130
  const events = new EventEmitter() as ArchiverEmitter;
123
131
 
132
+ // Create L2 tips cache shared by archiver and synchronizer
133
+ const l2TipsCache = new L2TipsCache(archiverStore.blockStore);
134
+
124
135
  // Create the L1 synchronizer
125
136
  const synchronizer = new ArchiverL1Synchronizer(
126
137
  publicClient,
127
138
  debugClient,
128
139
  rollup,
129
140
  inbox,
130
- { ...config.l1Contracts, slashingProposerAddress },
131
141
  archiverStore,
132
142
  archiverConfig,
133
143
  deps.blobClient,
@@ -137,6 +147,8 @@ export async function createArchiver(
137
147
  l1Constants,
138
148
  events,
139
149
  instrumentation.tracer,
150
+ l2TipsCache,
151
+ undefined, // log (use default)
140
152
  );
141
153
 
142
154
  const archiver = new Archiver(
@@ -151,13 +163,15 @@ export async function createArchiver(
151
163
  l1Constants,
152
164
  synchronizer,
153
165
  events,
166
+ l2TipsCache,
154
167
  );
155
168
 
156
169
  await archiver.start(opts.blockUntilSync);
157
170
  return archiver;
158
171
  }
159
172
 
160
- async function registerProtocolContracts(store: KVArchiverDataStore) {
173
+ /** Registers protocol contracts in the archiver store. */
174
+ export async function registerProtocolContracts(store: KVArchiverDataStore) {
161
175
  const blockNumber = 0;
162
176
  for (const name of protocolContractNames) {
163
177
  const provider = new BundledProtocolContractsProvider();
package/src/index.ts CHANGED
@@ -8,5 +8,6 @@ export * from './config.js';
8
8
  export { type L1PublishedData } from './structs/published.js';
9
9
  export { KVArchiverDataStore, ARCHIVER_DB_VERSION } from './store/kv_archiver_store.js';
10
10
  export { ContractInstanceStore } from './store/contract_instance_store.js';
11
+ export { L2TipsCache } from './store/l2_tips_cache.js';
11
12
 
12
13
  export { retrieveCheckpointsFromRollup, retrieveL2ProofVerifiedEvents } from './l1/data_retrieval.js';
package/src/l1/README.md CHANGED
@@ -5,29 +5,27 @@ Modules and classes to handle data retrieval from L1 for the archiver.
5
5
  ## Calldata Retriever
6
6
 
7
7
  The sequencer publisher bundles multiple operations into a single multicall3 transaction for gas
8
- efficiency. A typical transaction includes:
8
+ efficiency. The archiver needs to extract the `propose` calldata from these bundled transactions
9
+ to reconstruct L2 blocks.
9
10
 
10
- 1. Attestation invalidations (if needed): `invalidateBadAttestation`, `invalidateInsufficientAttestations`
11
- 2. Block proposal: `propose` (exactly one per transaction to the rollup contract)
12
- 3. Governance and slashing (if needed): votes, payload creation/execution
11
+ The retriever uses hash matching against `attestationsHash` and `payloadDigest` from the
12
+ `CheckpointProposed` L1 event to verify it has found the correct propose calldata. These hashes
13
+ are always required.
13
14
 
14
- The archiver needs to extract the `propose` calldata from these bundled transactions to reconstruct
15
- L2 blocks. This class needs to handle scenarios where the transaction was submitted via multicall3,
16
- as well as alternative ways for submitting the `propose` call that other clients might use.
15
+ ### Multicall3 Decoding with Hash Matching
17
16
 
18
- ### Multicall3 Validation and Decoding
19
-
20
- First attempt to decode the transaction as a multicall3 `aggregate3` call with validation:
17
+ First attempt to decode the transaction as a multicall3 `aggregate3` call:
21
18
 
22
19
  - Check if transaction is to multicall3 address (`0xcA11bde05977b3631167028862bE2a173976CA11`)
23
20
  - Decode as `aggregate3(Call3[] calldata calls)`
24
- - Allow calls to known addresses and methods (rollup, governance, slashing contracts, etc.)
25
- - Find the single `propose` call to the rollup contract
26
- - Verify exactly one `propose` call exists
27
- - Extract and return the propose calldata
21
+ - Find all calls matching the rollup contract address and the `propose` function selector
22
+ - Verify each candidate by computing `attestationsHash` (keccak256 of ABI-encoded attestations)
23
+ and `payloadDigest` (keccak256 of the consensus payload signing hash) and comparing against
24
+ expected values from the `CheckpointProposed` event
25
+ - Return the verified candidate (if multiple verify, return the first with a warning)
28
26
 
29
- This step handles the common case efficiently without requiring expensive trace or debug RPC calls.
30
- Any validation failure triggers fallback to the next step.
27
+ This approach works regardless of what other calls are in the multicall3 bundle, because hash
28
+ matching identifies the correct propose call without needing an allowlist.
31
29
 
32
30
  ### Direct Propose Call
33
31
 
@@ -35,64 +33,23 @@ Second attempt to decode the transaction as a direct `propose` call to the rollu
35
33
 
36
34
  - Check if transaction is to the rollup address
37
35
  - Decode as `propose` function call
38
- - Verify the function is indeed `propose`
36
+ - Verify against expected hashes
39
37
  - Return the transaction input as the propose calldata
40
38
 
41
- This handles scenarios where clients submit transactions directly to the rollup contract without
42
- using multicall3 for bundling. Any validation failure triggers fallback to the next step.
43
-
44
39
  ### Spire Proposer Call
45
40
 
46
- Given existing attempts to route the call via the Spire proposer, we also check if the tx is `to` the
47
- proposer known address, and if so, we try decoding it as either a multicall3 or a direct call to the
48
- rollup contract.
49
-
50
- Similar as with the multicall3 check, we check that there are no other calls in the Spire proposer, so
51
- we are absolutely sure that the only call is the successful one to the rollup. Any extraneous call would
52
- imply an unexpected path to calling `propose` in the rollup contract, and since we cannot verify if the
53
- calldata arguments we extracted are the correct ones (see the section below), we cannot know for sure which
54
- one is the call that succeeded, so we don't know which calldata to process.
55
-
56
- Furthermore, since the Spire proposer is upgradeable, we check if the implementation has not changed in
57
- order to decode. As usual, any validation failure triggers fallback to the next step.
58
-
59
- ### Verifying Multicall3 Arguments
60
-
61
- **This is NOT implemented for simplicity's sake**
62
-
63
- If the checks above don't hold, such as when there are multiple calls to `propose`, then we cannot
64
- reliably extract the `propose` calldata from the multicall3 arguments alone. We can try a best-effort
65
- where we try all `propose` calls we see and validate them against on-chain data. Note that we can use these
66
- same strategies if we were to obtain the calldata from another source.
67
-
68
- #### TempBlockLog Verification
69
-
70
- Read the stored `TempBlockLog` for the L2 block number from L1 and verify it matches our decoded header hash,
71
- since the `TempBlockLog` stores the hash of the proposed block header, the payload commitment, and the attestations.
72
-
73
- However, `TempBlockLog` is only stored temporarily and deleted after proven, so this method only works for recent
74
- blocks, not for historical data syncing.
75
-
76
- #### Archive Verification
77
-
78
- Verify that the archive root in the decoded propose is correct with regard to the block header. This requires
79
- hashing the block header we have retrieved, inserting it into the archive tree, and checking the resulting root
80
- against the one we got from L1.
81
-
82
- However, this requires that the archive keeps a reference to world-state, which is not the case in the current
83
- system.
84
-
85
- #### Emit Commitments in Rollup Contract
86
-
87
- Modify rollup contract to emit commitments to the block header in the `L2BlockProposed` event, allowing us to easily
88
- verify the calldata we obtained vs the emitted event.
41
+ Given existing attempts to route the call via the Spire proposer, we also check if the tx is
42
+ `to` the proposer known address. If so, we extract all wrapped calls and try each as either
43
+ a multicall3 or direct propose call, using hash matching to find and verify the correct one.
89
44
 
90
- However, modifying the rollup contract is out of scope for this change. But we can implement this approach in `v2`.
45
+ Since the Spire proposer is upgradeable, we check that the implementation has not changed in
46
+ order to decode. Any validation failure triggers fallback to the next step.
91
47
 
92
48
  ### Debug and Trace Transaction Fallback
93
49
 
94
- Last, we use L1 node's trace/debug RPC methods to definitively identify the one successful `propose` call within the tx.
95
- We can then extract the exact calldata that hit the `propose` function in the rollup contract.
50
+ Last, we use L1 node's trace/debug RPC methods to definitively identify the one successful
51
+ `propose` call within the tx. We can then extract the exact calldata that hit the `propose`
52
+ function in the rollup contract.
96
53
 
97
- This approach requires access to a debug-enabled L1 node, which may be more resource-intensive, so we only
98
- use it as a fallback when the first step fails, which should be rare in practice.
54
+ This approach requires access to a debug-enabled L1 node, which may be more resource-intensive,
55
+ so we only use it as a fallback when earlier steps fail, which should be rare in practice.
@@ -1,10 +1,11 @@
1
1
  #!/usr/bin/env node
2
2
  import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
3
- import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
3
+ import { CheckpointNumber } from '@aztec/foundation/branded-types';
4
4
  import { EthAddress } from '@aztec/foundation/eth-address';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
+ import { RollupAbi } from '@aztec/l1-artifacts/RollupAbi';
6
7
 
7
- import { type Hex, createPublicClient, http } from 'viem';
8
+ import { type Hex, createPublicClient, decodeEventLog, getAbiItem, http, toEventSelector } from 'viem';
8
9
  import { mainnet } from 'viem/chains';
9
10
 
10
11
  import { CalldataRetriever } from '../calldata_retriever.js';
@@ -88,14 +89,6 @@ async function main() {
88
89
 
89
90
  logger.info(`Transaction found in block ${tx.blockNumber}`);
90
91
 
91
- // For simplicity, use zero addresses for optional contract addresses
92
- // In production, these would be fetched from the rollup contract or configuration
93
- const slashingProposerAddress = EthAddress.ZERO;
94
- const governanceProposerAddress = EthAddress.ZERO;
95
- const slashFactoryAddress = undefined;
96
-
97
- logger.info('Using zero addresses for governance/slashing (can be configured if needed)');
98
-
99
92
  // Create CalldataRetriever
100
93
  const retriever = new CalldataRetriever(
101
94
  publicClient as unknown as ViemPublicClient,
@@ -103,53 +96,67 @@ async function main() {
103
96
  targetCommitteeSize,
104
97
  undefined,
105
98
  logger,
106
- {
107
- rollupAddress,
108
- governanceProposerAddress,
109
- slashingProposerAddress,
110
- slashFactoryAddress,
111
- },
99
+ rollupAddress,
112
100
  );
113
101
 
114
- // Extract L2 block number from transaction logs
115
- logger.info('Decoding transaction to extract L2 block number...');
102
+ // Extract checkpoint number and hashes from transaction logs
103
+ logger.info('Decoding transaction to extract checkpoint number and hashes...');
116
104
  const receipt = await publicClient.getTransactionReceipt({ hash: txHash });
117
- const l2BlockProposedEvent = receipt.logs.find(log => {
105
+
106
+ // Look for CheckpointProposed event
107
+ const checkpointProposedEventAbi = getAbiItem({ abi: RollupAbi, name: 'CheckpointProposed' });
108
+ const checkpointProposedLog = receipt.logs.find(log => {
118
109
  try {
119
- // Try to match the L2BlockProposed event
120
110
  return (
121
111
  log.address.toLowerCase() === rollupAddress.toString().toLowerCase() &&
122
- log.topics[0] === '0x2f1d0e696fa5186494a2f2f89a0e0bcbb15d607f6c5eac4637e07e1e5e7d3c00' // L2BlockProposed event signature
112
+ log.topics[0] === toEventSelector(checkpointProposedEventAbi)
123
113
  );
124
114
  } catch {
125
115
  return false;
126
116
  }
127
117
  });
128
118
 
129
- let l2BlockNumber: number;
130
- if (l2BlockProposedEvent && l2BlockProposedEvent.topics[1]) {
131
- // L2 block number is typically the first indexed parameter
132
- l2BlockNumber = Number(BigInt(l2BlockProposedEvent.topics[1]));
133
- logger.info(`L2 Block Number (from event): ${l2BlockNumber}`);
134
- } else {
135
- // Fallback: try to extract from transaction data or use a default
136
- logger.warn('Could not extract L2 block number from event, using block number as fallback');
137
- l2BlockNumber = Number(tx.blockNumber);
119
+ if (!checkpointProposedLog || checkpointProposedLog.topics[1] === undefined) {
120
+ throw new Error(`Checkpoint proposed event not found`);
121
+ }
122
+
123
+ const checkpointNumber = CheckpointNumber.fromBigInt(BigInt(checkpointProposedLog.topics[1]));
124
+
125
+ // Decode the full event to extract attestationsHash and payloadDigest
126
+ const decodedEvent = decodeEventLog({
127
+ abi: RollupAbi,
128
+ data: checkpointProposedLog.data,
129
+ topics: checkpointProposedLog.topics,
130
+ });
131
+
132
+ const eventArgs = decodedEvent.args as {
133
+ checkpointNumber: bigint;
134
+ archive: Hex;
135
+ versionedBlobHashes: Hex[];
136
+ attestationsHash: Hex;
137
+ payloadDigest: Hex;
138
+ };
139
+
140
+ if (!eventArgs.attestationsHash || !eventArgs.payloadDigest) {
141
+ throw new Error(`CheckpointProposed event missing attestationsHash or payloadDigest`);
138
142
  }
139
143
 
144
+ const expectedHashes = {
145
+ attestationsHash: eventArgs.attestationsHash,
146
+ payloadDigest: eventArgs.payloadDigest,
147
+ };
148
+
149
+ logger.info(`Checkpoint Number: ${checkpointNumber}`);
150
+ logger.info(`Attestations Hash: ${expectedHashes.attestationsHash}`);
151
+ logger.info(`Payload Digest: ${expectedHashes.payloadDigest}`);
152
+
140
153
  logger.info('');
141
- logger.info('Retrieving block header from rollup transaction...');
154
+ logger.info('Retrieving checkpoint from rollup transaction...');
142
155
  logger.info('');
143
156
 
144
- // For this script, we don't have blob hashes or expected hashes, so pass empty arrays/objects
145
- const result = await retriever.getCheckpointFromRollupTx(
146
- txHash,
147
- [],
148
- CheckpointNumber.fromBlockNumber(BlockNumber(l2BlockNumber)),
149
- {},
150
- );
157
+ const result = await retriever.getCheckpointFromRollupTx(txHash, [], checkpointNumber, expectedHashes);
151
158
 
152
- logger.info(' Successfully retrieved block header!');
159
+ logger.info(' Successfully retrieved block header!');
153
160
  logger.info('');
154
161
  logger.info('Block Header Details:');
155
162
  logger.info('====================');