@aztec/archiver 0.0.1-commit.1bea0213 → 0.0.1-commit.217f559981

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/dest/archiver.d.ts +7 -3
  2. package/dest/archiver.d.ts.map +1 -1
  3. package/dest/archiver.js +24 -93
  4. package/dest/factory.d.ts +3 -1
  5. package/dest/factory.d.ts.map +1 -1
  6. package/dest/factory.js +11 -7
  7. package/dest/index.d.ts +2 -1
  8. package/dest/index.d.ts.map +1 -1
  9. package/dest/index.js +1 -0
  10. package/dest/l1/bin/retrieve-calldata.js +18 -19
  11. package/dest/l1/calldata_retriever.d.ts +7 -1
  12. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  13. package/dest/l1/calldata_retriever.js +17 -4
  14. package/dest/l1/data_retrieval.d.ts +3 -2
  15. package/dest/l1/data_retrieval.d.ts.map +1 -1
  16. package/dest/l1/data_retrieval.js +4 -3
  17. package/dest/l1/validate_trace.d.ts +6 -3
  18. package/dest/l1/validate_trace.d.ts.map +1 -1
  19. package/dest/l1/validate_trace.js +13 -9
  20. package/dest/modules/data_source_base.d.ts +11 -6
  21. package/dest/modules/data_source_base.d.ts.map +1 -1
  22. package/dest/modules/data_source_base.js +28 -72
  23. package/dest/modules/data_store_updater.d.ts +9 -2
  24. package/dest/modules/data_store_updater.d.ts.map +1 -1
  25. package/dest/modules/data_store_updater.js +40 -19
  26. package/dest/modules/instrumentation.d.ts +4 -2
  27. package/dest/modules/instrumentation.d.ts.map +1 -1
  28. package/dest/modules/instrumentation.js +26 -12
  29. package/dest/modules/l1_synchronizer.d.ts +3 -2
  30. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  31. package/dest/modules/l1_synchronizer.js +8 -9
  32. package/dest/store/block_store.d.ts +19 -15
  33. package/dest/store/block_store.d.ts.map +1 -1
  34. package/dest/store/block_store.js +71 -19
  35. package/dest/store/contract_class_store.d.ts +1 -1
  36. package/dest/store/contract_class_store.d.ts.map +1 -1
  37. package/dest/store/contract_class_store.js +11 -7
  38. package/dest/store/kv_archiver_store.d.ts +21 -7
  39. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  40. package/dest/store/kv_archiver_store.js +20 -3
  41. package/dest/store/l2_tips_cache.d.ts +19 -0
  42. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  43. package/dest/store/l2_tips_cache.js +89 -0
  44. package/dest/store/log_store.d.ts +1 -1
  45. package/dest/store/log_store.d.ts.map +1 -1
  46. package/dest/store/log_store.js +56 -36
  47. package/dest/test/fake_l1_state.d.ts +4 -1
  48. package/dest/test/fake_l1_state.d.ts.map +1 -1
  49. package/dest/test/fake_l1_state.js +15 -9
  50. package/dest/test/index.js +3 -1
  51. package/dest/test/mock_archiver.d.ts +1 -1
  52. package/dest/test/mock_archiver.d.ts.map +1 -1
  53. package/dest/test/mock_archiver.js +3 -2
  54. package/dest/test/mock_l2_block_source.d.ts +21 -6
  55. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  56. package/dest/test/mock_l2_block_source.js +127 -84
  57. package/dest/test/mock_structs.d.ts +3 -2
  58. package/dest/test/mock_structs.d.ts.map +1 -1
  59. package/dest/test/mock_structs.js +7 -5
  60. package/dest/test/noop_l1_archiver.d.ts +23 -0
  61. package/dest/test/noop_l1_archiver.d.ts.map +1 -0
  62. package/dest/test/noop_l1_archiver.js +68 -0
  63. package/package.json +14 -13
  64. package/src/archiver.ts +32 -112
  65. package/src/factory.ts +26 -11
  66. package/src/index.ts +1 -0
  67. package/src/l1/bin/retrieve-calldata.ts +17 -23
  68. package/src/l1/calldata_retriever.ts +25 -3
  69. package/src/l1/data_retrieval.ts +4 -1
  70. package/src/l1/validate_trace.ts +24 -6
  71. package/src/modules/data_source_base.ts +56 -95
  72. package/src/modules/data_store_updater.ts +43 -18
  73. package/src/modules/instrumentation.ts +24 -12
  74. package/src/modules/l1_synchronizer.ts +9 -8
  75. package/src/store/block_store.ts +87 -38
  76. package/src/store/contract_class_store.ts +11 -7
  77. package/src/store/kv_archiver_store.ts +40 -8
  78. package/src/store/l2_tips_cache.ts +89 -0
  79. package/src/store/log_store.ts +95 -33
  80. package/src/test/fake_l1_state.ts +17 -9
  81. package/src/test/index.ts +3 -0
  82. package/src/test/mock_archiver.ts +3 -2
  83. package/src/test/mock_l2_block_source.ts +163 -83
  84. package/src/test/mock_structs.ts +22 -6
  85. package/src/test/noop_l1_archiver.ts +109 -0
@@ -0,0 +1,23 @@
1
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
+ import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers';
3
+ import { type TelemetryClient } from '@aztec/telemetry-client';
4
+ import { Archiver } from '../archiver.js';
5
+ import { ArchiverInstrumentation } from '../modules/instrumentation.js';
6
+ import type { KVArchiverDataStore } from '../store/kv_archiver_store.js';
7
+ /**
8
+ * Archiver with mocked L1 connectivity for testing.
9
+ * Uses mock L1 clients and a noop synchronizer, enabling tests that
10
+ * don't require real Ethereum connectivity.
11
+ */
12
+ export declare class NoopL1Archiver extends Archiver {
13
+ constructor(dataStore: KVArchiverDataStore, l1Constants: L1RollupConstants & {
14
+ genesisArchiveRoot: Fr;
15
+ }, instrumentation: ArchiverInstrumentation);
16
+ /** Override start to skip L1 validation checks. */
17
+ start(_blockUntilSynced?: boolean): Promise<void>;
18
+ }
19
+ /** Creates an archiver with mocked L1 connectivity for testing. */
20
+ export declare function createNoopL1Archiver(dataStore: KVArchiverDataStore, l1Constants: L1RollupConstants & {
21
+ genesisArchiveRoot: Fr;
22
+ }, telemetry?: TelemetryClient): Promise<NoopL1Archiver>;
23
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibm9vcF9sMV9hcmNoaXZlci5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL3Rlc3Qvbm9vcF9sMV9hcmNoaXZlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFJQSxPQUFPLEVBQUUsRUFBRSxFQUFFLE1BQU0sZ0NBQWdDLENBQUM7QUFJcEQsT0FBTyxLQUFLLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSw2QkFBNkIsQ0FBQztBQUNyRSxPQUFPLEVBQUUsS0FBSyxlQUFlLEVBQW1DLE1BQU0seUJBQXlCLENBQUM7QUFLaEcsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGdCQUFnQixDQUFDO0FBQzFDLE9BQU8sRUFBRSx1QkFBdUIsRUFBRSxNQUFNLCtCQUErQixDQUFDO0FBRXhFLE9BQU8sS0FBSyxFQUFFLG1CQUFtQixFQUFFLE1BQU0sK0JBQStCLENBQUM7QUF5QnpFOzs7O0dBSUc7QUFDSCxxQkFBYSxjQUFlLFNBQVEsUUFBUTtJQUMxQyxZQUNFLFNBQVMsRUFBRSxtQkFBbUIsRUFDOUIsV0FBVyxFQUFFLGlCQUFpQixHQUFHO1FBQUUsa0JBQWtCLEVBQUUsRUFBRSxDQUFBO0tBQUUsRUFDM0QsZUFBZSxFQUFFLHVCQUF1QixFQXVDekM7SUFFRCxtREFBbUQ7SUFDbkMsS0FBSyxDQUFDLGlCQUFpQixDQUFDLEVBQUUsT0FBTyxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FJaEU7Q0FDRjtBQUVELG1FQUFtRTtBQUNuRSx3QkFBc0Isb0JBQW9CLENBQ3hDLFNBQVMsRUFBRSxtQkFBbUIsRUFDOUIsV0FBVyxFQUFFLGlCQUFpQixHQUFHO0lBQUUsa0JBQWtCLEVBQUUsRUFBRSxDQUFBO0NBQUUsRUFDM0QsU0FBUyxHQUFFLGVBQXNDLEdBQ2hELE9BQU8sQ0FBQyxjQUFjLENBQUMsQ0FHekIifQ==
@@ -0,0 +1 @@
1
+ {"version":3,"file":"noop_l1_archiver.d.ts","sourceRoot":"","sources":["../../src/test/noop_l1_archiver.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAIpD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,KAAK,eAAe,EAAmC,MAAM,yBAAyB,CAAC;AAKhG,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,uBAAuB,EAAE,MAAM,+BAA+B,CAAC;AAExE,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAyBzE;;;;GAIG;AACH,qBAAa,cAAe,SAAQ,QAAQ;IAC1C,YACE,SAAS,EAAE,mBAAmB,EAC9B,WAAW,EAAE,iBAAiB,GAAG;QAAE,kBAAkB,EAAE,EAAE,CAAA;KAAE,EAC3D,eAAe,EAAE,uBAAuB,EAuCzC;IAED,mDAAmD;IACnC,KAAK,CAAC,iBAAiB,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CAIhE;CACF;AAED,mEAAmE;AACnE,wBAAsB,oBAAoB,CACxC,SAAS,EAAE,mBAAmB,EAC9B,WAAW,EAAE,iBAAiB,GAAG;IAAE,kBAAkB,EAAE,EAAE,CAAA;CAAE,EAC3D,SAAS,GAAE,eAAsC,GAChD,OAAO,CAAC,cAAc,CAAC,CAGzB"}
@@ -0,0 +1,68 @@
1
+ import { Buffer32 } from '@aztec/foundation/buffer';
2
+ import { EthAddress } from '@aztec/foundation/eth-address';
3
+ import { getTelemetryClient } from '@aztec/telemetry-client';
4
+ import { mock } from 'jest-mock-extended';
5
+ import { EventEmitter } from 'node:events';
6
+ import { Archiver } from '../archiver.js';
7
+ import { ArchiverInstrumentation } from '../modules/instrumentation.js';
8
+ /** Noop L1 synchronizer for testing without L1 connectivity. */ class NoopL1Synchronizer {
9
+ tracer;
10
+ constructor(tracer){
11
+ this.tracer = tracer;
12
+ }
13
+ setConfig(_config) {}
14
+ getL1BlockNumber() {
15
+ return 0n;
16
+ }
17
+ getL1Timestamp() {
18
+ return 0n;
19
+ }
20
+ testEthereumNodeSynced() {
21
+ return Promise.resolve();
22
+ }
23
+ syncFromL1(_initialSyncComplete) {
24
+ return Promise.resolve();
25
+ }
26
+ }
27
+ /**
28
+ * Archiver with mocked L1 connectivity for testing.
29
+ * Uses mock L1 clients and a noop synchronizer, enabling tests that
30
+ * don't require real Ethereum connectivity.
31
+ */ export class NoopL1Archiver extends Archiver {
32
+ constructor(dataStore, l1Constants, instrumentation){
33
+ // Create mocks for L1 clients
34
+ const publicClient = mock();
35
+ const debugClient = mock();
36
+ const rollup = mock();
37
+ const blobClient = mock();
38
+ // Mock methods called during start()
39
+ blobClient.testSources.mockResolvedValue();
40
+ publicClient.getBlockNumber.mockResolvedValue(1n);
41
+ const events = new EventEmitter();
42
+ const synchronizer = new NoopL1Synchronizer(instrumentation.tracer);
43
+ super(publicClient, debugClient, rollup, {
44
+ registryAddress: EthAddress.ZERO,
45
+ governanceProposerAddress: EthAddress.ZERO,
46
+ slashFactoryAddress: EthAddress.ZERO,
47
+ slashingProposerAddress: EthAddress.ZERO
48
+ }, dataStore, {
49
+ pollingIntervalMs: 1000,
50
+ batchSize: 100,
51
+ skipValidateCheckpointAttestations: true,
52
+ maxAllowedEthClientDriftSeconds: 300,
53
+ ethereumAllowNoDebugHosts: true
54
+ }, blobClient, instrumentation, {
55
+ ...l1Constants,
56
+ l1StartBlockHash: Buffer32.random()
57
+ }, synchronizer, events);
58
+ }
59
+ /** Override start to skip L1 validation checks. */ start(_blockUntilSynced) {
60
+ // Just start the running promise without L1 checks
61
+ this.runningPromise.start();
62
+ return Promise.resolve();
63
+ }
64
+ }
65
+ /** Creates an archiver with mocked L1 connectivity for testing. */ export async function createNoopL1Archiver(dataStore, l1Constants, telemetry = getTelemetryClient()) {
66
+ const instrumentation = await ArchiverInstrumentation.new(telemetry, ()=>dataStore.estimateSize());
67
+ return new NoopL1Archiver(dataStore, l1Constants, instrumentation);
68
+ }
package/package.json CHANGED
@@ -1,10 +1,11 @@
1
1
  {
2
2
  "name": "@aztec/archiver",
3
- "version": "0.0.1-commit.1bea0213",
3
+ "version": "0.0.1-commit.217f559981",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
7
7
  "./test": "./dest/test/index.js",
8
+ "./test/noop-l1": "./dest/test/noop_l1_archiver.js",
8
9
  "./config": "./dest/config.js"
9
10
  },
10
11
  "typedocOptions": {
@@ -64,18 +65,18 @@
64
65
  ]
65
66
  },
66
67
  "dependencies": {
67
- "@aztec/blob-client": "0.0.1-commit.1bea0213",
68
- "@aztec/blob-lib": "0.0.1-commit.1bea0213",
69
- "@aztec/constants": "0.0.1-commit.1bea0213",
70
- "@aztec/epoch-cache": "0.0.1-commit.1bea0213",
71
- "@aztec/ethereum": "0.0.1-commit.1bea0213",
72
- "@aztec/foundation": "0.0.1-commit.1bea0213",
73
- "@aztec/kv-store": "0.0.1-commit.1bea0213",
74
- "@aztec/l1-artifacts": "0.0.1-commit.1bea0213",
75
- "@aztec/noir-protocol-circuits-types": "0.0.1-commit.1bea0213",
76
- "@aztec/protocol-contracts": "0.0.1-commit.1bea0213",
77
- "@aztec/stdlib": "0.0.1-commit.1bea0213",
78
- "@aztec/telemetry-client": "0.0.1-commit.1bea0213",
68
+ "@aztec/blob-client": "0.0.1-commit.217f559981",
69
+ "@aztec/blob-lib": "0.0.1-commit.217f559981",
70
+ "@aztec/constants": "0.0.1-commit.217f559981",
71
+ "@aztec/epoch-cache": "0.0.1-commit.217f559981",
72
+ "@aztec/ethereum": "0.0.1-commit.217f559981",
73
+ "@aztec/foundation": "0.0.1-commit.217f559981",
74
+ "@aztec/kv-store": "0.0.1-commit.217f559981",
75
+ "@aztec/l1-artifacts": "0.0.1-commit.217f559981",
76
+ "@aztec/noir-protocol-circuits-types": "0.0.1-commit.217f559981",
77
+ "@aztec/protocol-contracts": "0.0.1-commit.217f559981",
78
+ "@aztec/stdlib": "0.0.1-commit.217f559981",
79
+ "@aztec/telemetry-client": "0.0.1-commit.217f559981",
79
80
  "lodash.groupby": "^4.6.0",
80
81
  "lodash.omit": "^4.5.0",
81
82
  "tslib": "^2.5.0",
package/src/archiver.ts CHANGED
@@ -1,5 +1,4 @@
1
1
  import type { BlobClientInterface } from '@aztec/blob-client/client';
2
- import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
3
2
  import { EpochCache } from '@aztec/epoch-cache';
4
3
  import { BlockTagTooOldError, RollupContract } from '@aztec/ethereum/contracts';
5
4
  import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
@@ -15,8 +14,6 @@ import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/runni
15
14
  import { DateProvider } from '@aztec/foundation/timer';
16
15
  import {
17
16
  type ArchiverEmitter,
18
- type CheckpointId,
19
- GENESIS_CHECKPOINT_HEADER_HASH,
20
17
  L2Block,
21
18
  type L2BlockSink,
22
19
  type L2Tips,
@@ -26,6 +23,7 @@ import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
26
23
  import {
27
24
  type L1RollupConstants,
28
25
  getEpochNumberAtTimestamp,
26
+ getSlotAtNextL1Block,
29
27
  getSlotAtTimestamp,
30
28
  getSlotRangeForEpoch,
31
29
  getTimestampRangeForEpoch,
@@ -40,6 +38,7 @@ import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js';
40
38
  import type { ArchiverInstrumentation } from './modules/instrumentation.js';
41
39
  import type { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
42
40
  import type { KVArchiverDataStore } from './store/kv_archiver_store.js';
41
+ import { L2TipsCache } from './store/l2_tips_cache.js';
43
42
 
44
43
  /** Export ArchiverEmitter for use in factory and tests. */
45
44
  export type { ArchiverEmitter };
@@ -68,7 +67,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
68
67
  public readonly events: ArchiverEmitter;
69
68
 
70
69
  /** A loop in which we will be continually fetching new checkpoints. */
71
- private runningPromise: RunningPromise;
70
+ protected runningPromise: RunningPromise;
72
71
 
73
72
  /** L1 synchronizer that handles fetching checkpoints and messages from L1. */
74
73
  private readonly synchronizer: ArchiverL1Synchronizer;
@@ -82,6 +81,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
82
81
  /** Helper to handle updates to the store */
83
82
  private readonly updater: ArchiverDataStoreUpdater;
84
83
 
84
+ /** In-memory cache for L2 chain tips. */
85
+ private readonly l2TipsCache: L2TipsCache;
86
+
85
87
  public readonly tracer: Tracer;
86
88
 
87
89
  /**
@@ -121,6 +123,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
121
123
  protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
122
124
  synchronizer: ArchiverL1Synchronizer,
123
125
  events: ArchiverEmitter,
126
+ l2TipsCache?: L2TipsCache,
124
127
  private readonly log: Logger = createLogger('archiver'),
125
128
  ) {
126
129
  super(dataStore, l1Constants);
@@ -129,7 +132,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
129
132
  this.initialSyncPromise = promiseWithResolvers();
130
133
  this.synchronizer = synchronizer;
131
134
  this.events = events;
132
- this.updater = new ArchiverDataStoreUpdater(this.dataStore);
135
+ this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore);
136
+ this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache);
133
137
 
134
138
  // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
135
139
  // are done as fast as possible. This then gets updated once the initial sync completes.
@@ -158,7 +162,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
158
162
 
159
163
  await this.blobClient.testSources();
160
164
  await this.synchronizer.testEthereumNodeSynced();
161
- await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
165
+ await validateAndLogTraceAvailability(
166
+ this.debugClient,
167
+ this.config.ethereumAllowNoDebugHosts ?? false,
168
+ this.log.getBindings(),
169
+ );
162
170
 
163
171
  // Log initial state for the archiver
164
172
  const { l1StartBlock } = this.l1Constants;
@@ -212,8 +220,23 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
212
220
  const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
213
221
  this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
214
222
 
223
+ // Calculate slot threshold for validation
224
+ const l1Timestamp = this.synchronizer.getL1Timestamp();
225
+ const slotAtNextL1Block =
226
+ l1Timestamp === undefined ? undefined : getSlotAtNextL1Block(l1Timestamp, this.l1Constants);
227
+
215
228
  // Process each block individually to properly resolve/reject each promise
216
229
  for (const { block, resolve, reject } of queuedItems) {
230
+ const blockSlot = block.header.globalVariables.slotNumber;
231
+ if (slotAtNextL1Block !== undefined && blockSlot < slotAtNextL1Block) {
232
+ this.log.warn(
233
+ `Rejecting proposed block ${block.number} for past slot ${blockSlot} (current is ${slotAtNextL1Block})`,
234
+ { block: block.toBlockInfo(), l1Timestamp, slotAtNextL1Block },
235
+ );
236
+ reject(new Error(`Block ${block.number} is for past slot ${blockSlot} (current is ${slotAtNextL1Block})`));
237
+ continue;
238
+ }
239
+
217
240
  try {
218
241
  await this.updater.addProposedBlocks([block]);
219
242
  this.log.debug(`Added block ${block.number} to store`);
@@ -371,111 +394,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
371
394
  return true;
372
395
  }
373
396
 
374
- public async getL2Tips(): Promise<L2Tips> {
375
- const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber, finalizedBlockNumber] = await Promise.all([
376
- this.getBlockNumber(),
377
- this.getProvenBlockNumber(),
378
- this.getCheckpointedL2BlockNumber(),
379
- this.getFinalizedL2BlockNumber(),
380
- ] as const);
381
-
382
- const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
383
-
384
- // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
385
- const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
386
- await Promise.all([
387
- latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
388
- provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
389
- finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
390
- checkpointedBlockNumber > beforeInitialblockNumber
391
- ? this.getCheckpointedBlock(checkpointedBlockNumber)
392
- : undefined,
393
- ] as const);
394
-
395
- if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
396
- throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
397
- }
398
-
399
- // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
400
- if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
401
- throw new Error(
402
- `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
403
- );
404
- }
405
-
406
- if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
407
- throw new Error(
408
- `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
409
- );
410
- }
411
-
412
- if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
413
- throw new Error(
414
- `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
415
- );
416
- }
417
-
418
- const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
419
- const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
420
- const finalizedBlockHeaderHash =
421
- (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
422
- const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
423
-
424
- // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
425
- const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
426
- provenCheckpointedBlock !== undefined
427
- ? await this.getCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
428
- : [undefined],
429
- finalizedCheckpointedBlock !== undefined
430
- ? await this.getCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
431
- : [undefined],
432
- checkpointedBlock !== undefined ? await this.getCheckpoints(checkpointedBlock?.checkpointNumber, 1) : [undefined],
433
- ]);
434
-
435
- const initialcheckpointId: CheckpointId = {
436
- number: CheckpointNumber.ZERO,
437
- hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
438
- };
439
-
440
- const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
441
- if (checkpoint === undefined) {
442
- return initialcheckpointId;
443
- }
444
- return {
445
- number: checkpoint.checkpoint.number,
446
- hash: checkpoint.checkpoint.hash().toString(),
447
- };
448
- };
449
-
450
- const l2Tips: L2Tips = {
451
- proposed: {
452
- number: latestBlockNumber,
453
- hash: latestBlockHeaderHash.toString(),
454
- },
455
- proven: {
456
- block: {
457
- number: provenBlockNumber,
458
- hash: provenBlockHeaderHash.toString(),
459
- },
460
- checkpoint: makeCheckpointId(provenBlockCheckpoint),
461
- },
462
- finalized: {
463
- block: {
464
- number: finalizedBlockNumber,
465
- hash: finalizedBlockHeaderHash.toString(),
466
- },
467
- checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
468
- },
469
- checkpointed: {
470
- block: {
471
- number: checkpointedBlockNumber,
472
- hash: checkpointedBlockHeaderHash.toString(),
473
- },
474
- checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
475
- },
476
- };
477
-
478
- return l2Tips;
397
+ public getL2Tips(): Promise<L2Tips> {
398
+ return this.l2TipsCache.getL2Tips();
479
399
  }
480
400
 
481
401
  public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
@@ -512,7 +432,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
512
432
  await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
513
433
  if (targetL2BlockNumber < currentProvenBlock) {
514
434
  this.log.info(`Clearing proven L2 block number`);
515
- await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
435
+ await this.updater.setProvenCheckpointNumber(CheckpointNumber.ZERO);
516
436
  }
517
437
  // TODO(palla/reorg): Set the finalized block when we add support for it.
518
438
  // if (targetL2BlockNumber < currentFinalizedBlock) {
package/src/factory.ts CHANGED
@@ -6,7 +6,6 @@ import { BlockNumber } from '@aztec/foundation/branded-types';
6
6
  import { Buffer32 } from '@aztec/foundation/buffer';
7
7
  import { merge } from '@aztec/foundation/collection';
8
8
  import { Fr } from '@aztec/foundation/curves/bn254';
9
- import { createLogger } from '@aztec/foundation/log';
10
9
  import { DateProvider } from '@aztec/foundation/timer';
11
10
  import type { DataStoreConfig } from '@aztec/kv-store/config';
12
11
  import { createStore } from '@aztec/kv-store/lmdb-v2';
@@ -26,6 +25,7 @@ import { type ArchiverConfig, mapArchiverConfig } from './config.js';
26
25
  import { ArchiverInstrumentation } from './modules/instrumentation.js';
27
26
  import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js';
28
27
  import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './store/kv_archiver_store.js';
28
+ import { L2TipsCache } from './store/l2_tips_cache.js';
29
29
 
30
30
  export const ARCHIVER_STORE_NAME = 'archiver';
31
31
 
@@ -38,7 +38,7 @@ export async function createArchiverStore(
38
38
  ...userConfig,
39
39
  dataStoreMapSizeKb: userConfig.archiverStoreMapSizeKb ?? userConfig.dataStoreMapSizeKb,
40
40
  };
41
- const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config, createLogger('archiver:lmdb'));
41
+ const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config);
42
42
  return new KVArchiverDataStore(store, config.maxLogs, l1Constants);
43
43
  }
44
44
 
@@ -78,14 +78,21 @@ export async function createArchiver(
78
78
  const inbox = new InboxContract(publicClient, config.l1Contracts.inboxAddress);
79
79
 
80
80
  // Fetch L1 constants from rollup contract
81
- const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
82
- await Promise.all([
83
- rollup.getL1StartBlock(),
84
- rollup.getL1GenesisTime(),
85
- rollup.getProofSubmissionEpochs(),
86
- rollup.getGenesisArchiveTreeRoot(),
87
- rollup.getSlashingProposerAddress(),
88
- ] as const);
81
+ const [
82
+ l1StartBlock,
83
+ l1GenesisTime,
84
+ proofSubmissionEpochs,
85
+ genesisArchiveRoot,
86
+ slashingProposerAddress,
87
+ targetCommitteeSize,
88
+ ] = await Promise.all([
89
+ rollup.getL1StartBlock(),
90
+ rollup.getL1GenesisTime(),
91
+ rollup.getProofSubmissionEpochs(),
92
+ rollup.getGenesisArchiveTreeRoot(),
93
+ rollup.getSlashingProposerAddress(),
94
+ rollup.getTargetCommitteeSize(),
95
+ ] as const);
89
96
 
90
97
  const l1StartBlockHash = await publicClient
91
98
  .getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
@@ -101,6 +108,7 @@ export async function createArchiver(
101
108
  slotDuration,
102
109
  ethereumSlotDuration,
103
110
  proofSubmissionEpochs: Number(proofSubmissionEpochs),
111
+ targetCommitteeSize,
104
112
  genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
105
113
  };
106
114
 
@@ -121,6 +129,9 @@ export async function createArchiver(
121
129
  // Create the event emitter that will be shared by archiver and synchronizer
122
130
  const events = new EventEmitter() as ArchiverEmitter;
123
131
 
132
+ // Create L2 tips cache shared by archiver and synchronizer
133
+ const l2TipsCache = new L2TipsCache(archiverStore.blockStore);
134
+
124
135
  // Create the L1 synchronizer
125
136
  const synchronizer = new ArchiverL1Synchronizer(
126
137
  publicClient,
@@ -137,6 +148,8 @@ export async function createArchiver(
137
148
  l1Constants,
138
149
  events,
139
150
  instrumentation.tracer,
151
+ l2TipsCache,
152
+ undefined, // log (use default)
140
153
  );
141
154
 
142
155
  const archiver = new Archiver(
@@ -151,13 +164,15 @@ export async function createArchiver(
151
164
  l1Constants,
152
165
  synchronizer,
153
166
  events,
167
+ l2TipsCache,
154
168
  );
155
169
 
156
170
  await archiver.start(opts.blockUntilSync);
157
171
  return archiver;
158
172
  }
159
173
 
160
- async function registerProtocolContracts(store: KVArchiverDataStore) {
174
+ /** Registers protocol contracts in the archiver store. */
175
+ export async function registerProtocolContracts(store: KVArchiverDataStore) {
161
176
  const blockNumber = 0;
162
177
  for (const name of protocolContractNames) {
163
178
  const provider = new BundledProtocolContractsProvider();
package/src/index.ts CHANGED
@@ -8,5 +8,6 @@ export * from './config.js';
8
8
  export { type L1PublishedData } from './structs/published.js';
9
9
  export { KVArchiverDataStore, ARCHIVER_DB_VERSION } from './store/kv_archiver_store.js';
10
10
  export { ContractInstanceStore } from './store/contract_instance_store.js';
11
+ export { L2TipsCache } from './store/l2_tips_cache.js';
11
12
 
12
13
  export { retrieveCheckpointsFromRollup, retrieveL2ProofVerifiedEvents } from './l1/data_retrieval.js';
@@ -1,10 +1,11 @@
1
1
  #!/usr/bin/env node
2
2
  import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
3
- import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
3
+ import { CheckpointNumber } from '@aztec/foundation/branded-types';
4
4
  import { EthAddress } from '@aztec/foundation/eth-address';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
+ import { RollupAbi } from '@aztec/l1-artifacts/RollupAbi';
6
7
 
7
- import { type Hex, createPublicClient, http } from 'viem';
8
+ import { type Hex, createPublicClient, getAbiItem, http, toEventSelector } from 'viem';
8
9
  import { mainnet } from 'viem/chains';
9
10
 
10
11
  import { CalldataRetriever } from '../calldata_retriever.js';
@@ -111,43 +112,36 @@ async function main() {
111
112
  },
112
113
  );
113
114
 
114
- // Extract L2 block number from transaction logs
115
- logger.info('Decoding transaction to extract L2 block number...');
115
+ // Extract checkpoint number from transaction logs
116
+ logger.info('Decoding transaction to extract checkpoint number...');
116
117
  const receipt = await publicClient.getTransactionReceipt({ hash: txHash });
117
- const l2BlockProposedEvent = receipt.logs.find(log => {
118
+
119
+ // Look for CheckpointProposed event (emitted when a checkpoint is proposed to the rollup)
120
+ // Event signature: CheckpointProposed(uint256 indexed checkpointNumber, bytes32 indexed archive, bytes32[], bytes32, bytes32)
121
+ // Hash: keccak256("CheckpointProposed(uint256,bytes32,bytes32[],bytes32,bytes32)")
122
+ const checkpointProposedEvent = receipt.logs.find(log => {
118
123
  try {
119
- // Try to match the L2BlockProposed event
120
124
  return (
121
125
  log.address.toLowerCase() === rollupAddress.toString().toLowerCase() &&
122
- log.topics[0] === '0x2f1d0e696fa5186494a2f2f89a0e0bcbb15d607f6c5eac4637e07e1e5e7d3c00' // L2BlockProposed event signature
126
+ log.topics[0] === toEventSelector(getAbiItem({ abi: RollupAbi, name: 'CheckpointProposed' }))
123
127
  );
124
128
  } catch {
125
129
  return false;
126
130
  }
127
131
  });
128
132
 
129
- let l2BlockNumber: number;
130
- if (l2BlockProposedEvent && l2BlockProposedEvent.topics[1]) {
131
- // L2 block number is typically the first indexed parameter
132
- l2BlockNumber = Number(BigInt(l2BlockProposedEvent.topics[1]));
133
- logger.info(`L2 Block Number (from event): ${l2BlockNumber}`);
134
- } else {
135
- // Fallback: try to extract from transaction data or use a default
136
- logger.warn('Could not extract L2 block number from event, using block number as fallback');
137
- l2BlockNumber = Number(tx.blockNumber);
133
+ if (!checkpointProposedEvent || checkpointProposedEvent.topics[1] === undefined) {
134
+ throw new Error(`Checkpoint proposed event not found`);
138
135
  }
139
136
 
137
+ const checkpointNumber = CheckpointNumber.fromBigInt(BigInt(checkpointProposedEvent.topics[1]));
138
+
140
139
  logger.info('');
141
- logger.info('Retrieving block header from rollup transaction...');
140
+ logger.info('Retrieving checkpoint from rollup transaction...');
142
141
  logger.info('');
143
142
 
144
143
  // For this script, we don't have blob hashes or expected hashes, so pass empty arrays/objects
145
- const result = await retriever.getCheckpointFromRollupTx(
146
- txHash,
147
- [],
148
- CheckpointNumber.fromBlockNumber(BlockNumber(l2BlockNumber)),
149
- {},
150
- );
144
+ const result = await retriever.getCheckpointFromRollupTx(txHash, [], checkpointNumber, {});
151
145
 
152
146
  logger.info(' Successfully retrieved block header!');
153
147
  logger.info('');
@@ -39,6 +39,14 @@ import type { CallInfo } from './types.js';
39
39
  * in order to reconstruct an L2 block header.
40
40
  */
41
41
  export class CalldataRetriever {
42
+ /** Tx hashes we've already logged for trace+debug failure (log once per tx per process). */
43
+ private static readonly traceFailureWarnedTxHashes = new Set<string>();
44
+
45
+ /** Clears the trace-failure warned set. For testing only. */
46
+ static resetTraceFailureWarnedForTesting(): void {
47
+ CalldataRetriever.traceFailureWarnedTxHashes.clear();
48
+ }
49
+
42
50
  /** Pre-computed valid contract calls for validation */
43
51
  private readonly validContractCalls: ValidContractCall[];
44
52
 
@@ -84,6 +92,7 @@ export class CalldataRetriever {
84
92
  header: CheckpointHeader;
85
93
  attestations: CommitteeAttestation[];
86
94
  blockHash: string;
95
+ feeAssetPriceModifier: bigint;
87
96
  }> {
88
97
  this.logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}`, {
89
98
  willValidateHashes: !!expectedHashes.attestationsHash || !!expectedHashes.payloadDigest,
@@ -313,7 +322,8 @@ export class CalldataRetriever {
313
322
  this.logger.debug(`Successfully traced using trace_transaction, found ${calls.length} calls`);
314
323
  } catch (err) {
315
324
  const traceError = err instanceof Error ? err : new Error(String(err));
316
- this.logger.verbose(`Failed trace_transaction for ${txHash}`, { traceError });
325
+ this.logger.verbose(`Failed trace_transaction for ${txHash}: ${traceError.message}`);
326
+ this.logger.debug(`Trace failure details for ${txHash}`, { traceError });
317
327
 
318
328
  try {
319
329
  // Fall back to debug_traceTransaction (Geth RPC)
@@ -322,7 +332,16 @@ export class CalldataRetriever {
322
332
  this.logger.debug(`Successfully traced using debug_traceTransaction, found ${calls.length} calls`);
323
333
  } catch (debugErr) {
324
334
  const debugError = debugErr instanceof Error ? debugErr : new Error(String(debugErr));
325
- this.logger.warn(`All tracing methods failed for tx ${txHash}`, {
335
+ // Log once per tx so we don't spam on every sync cycle when sync point doesn't advance
336
+ if (!CalldataRetriever.traceFailureWarnedTxHashes.has(txHash)) {
337
+ CalldataRetriever.traceFailureWarnedTxHashes.add(txHash);
338
+ this.logger.warn(
339
+ `Cannot decode L1 tx ${txHash}: trace and debug RPC failed or unavailable. ` +
340
+ `trace_transaction: ${traceError.message}; debug_traceTransaction: ${debugError.message}`,
341
+ );
342
+ }
343
+ // Full error objects can be very long; keep at debug only
344
+ this.logger.debug(`Trace/debug failure details for tx ${txHash}`, {
326
345
  traceError,
327
346
  debugError,
328
347
  txHash,
@@ -403,6 +422,7 @@ export class CalldataRetriever {
403
422
  header: CheckpointHeader;
404
423
  attestations: CommitteeAttestation[];
405
424
  blockHash: string;
425
+ feeAssetPriceModifier: bigint;
406
426
  } {
407
427
  const { functionName: rollupFunctionName, args: rollupArgs } = decodeFunctionData({
408
428
  abi: RollupAbi,
@@ -458,7 +478,8 @@ export class CalldataRetriever {
458
478
  if (expectedHashes.payloadDigest) {
459
479
  // Use ConsensusPayload to compute the digest - this ensures we match the exact logic
460
480
  // used by the network for signing and verification
461
- const consensusPayload = new ConsensusPayload(header, archiveRoot);
481
+ const feeAssetPriceModifier = decodedArgs.oracleInput.feeAssetPriceModifier;
482
+ const consensusPayload = new ConsensusPayload(header, archiveRoot, feeAssetPriceModifier);
462
483
  const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation);
463
484
  const computedPayloadDigest = keccak256(payloadToSign);
464
485
 
@@ -495,6 +516,7 @@ export class CalldataRetriever {
495
516
  header,
496
517
  attestations,
497
518
  blockHash,
519
+ feeAssetPriceModifier: decodedArgs.oracleInput.feeAssetPriceModifier,
498
520
  };
499
521
  }
500
522
  }
@@ -38,6 +38,7 @@ import { CalldataRetriever } from './calldata_retriever.js';
38
38
  export type RetrievedCheckpoint = {
39
39
  checkpointNumber: CheckpointNumber;
40
40
  archiveRoot: Fr;
41
+ feeAssetPriceModifier: bigint;
41
42
  header: CheckpointHeader;
42
43
  checkpointBlobData: CheckpointBlobData;
43
44
  l1: L1PublishedData;
@@ -49,6 +50,7 @@ export type RetrievedCheckpoint = {
49
50
  export async function retrievedToPublishedCheckpoint({
50
51
  checkpointNumber,
51
52
  archiveRoot,
53
+ feeAssetPriceModifier,
52
54
  header: checkpointHeader,
53
55
  checkpointBlobData,
54
56
  l1,
@@ -100,7 +102,7 @@ export async function retrievedToPublishedCheckpoint({
100
102
  }),
101
103
  });
102
104
 
103
- const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);
105
+ const body = Body.fromTxBlobData(blockBlobData.txs);
104
106
 
105
107
  const blobFields = encodeBlockBlobData(blockBlobData);
106
108
  await spongeBlob.absorb(blobFields);
@@ -128,6 +130,7 @@ export async function retrievedToPublishedCheckpoint({
128
130
  header: checkpointHeader,
129
131
  blocks: l2Blocks,
130
132
  number: checkpointNumber,
133
+ feeAssetPriceModifier: feeAssetPriceModifier,
131
134
  });
132
135
 
133
136
  return PublishedCheckpoint.from({ checkpoint, l1, attestations });