@aztec/archiver 0.0.1-commit.b655e406 → 0.0.1-commit.fce3e4f

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. package/dest/archiver/archiver.d.ts +30 -20
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +294 -208
  4. package/dest/archiver/archiver_store.d.ts +1 -1
  5. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +5 -4
  8. package/dest/archiver/config.d.ts +1 -1
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +5 -0
  11. package/dest/archiver/data_retrieval.d.ts +17 -17
  12. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  13. package/dest/archiver/data_retrieval.js +110 -86
  14. package/dest/archiver/errors.d.ts +1 -1
  15. package/dest/archiver/errors.d.ts.map +1 -1
  16. package/dest/archiver/index.d.ts +1 -1
  17. package/dest/archiver/instrumentation.d.ts +3 -3
  18. package/dest/archiver/instrumentation.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/block_store.d.ts +1 -1
  20. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  21. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +1 -1
  22. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +1 -1
  24. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  25. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +2 -2
  26. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  27. package/dest/archiver/kv_archiver_store/log_store.d.ts +1 -1
  28. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  29. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -1
  30. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  31. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  32. package/dest/archiver/structs/inbox_message.d.ts +1 -1
  33. package/dest/archiver/structs/published.d.ts +3 -2
  34. package/dest/archiver/structs/published.d.ts.map +1 -1
  35. package/dest/archiver/validation.d.ts +10 -4
  36. package/dest/archiver/validation.d.ts.map +1 -1
  37. package/dest/archiver/validation.js +29 -21
  38. package/dest/factory.d.ts +1 -1
  39. package/dest/index.d.ts +2 -2
  40. package/dest/index.d.ts.map +1 -1
  41. package/dest/index.js +1 -1
  42. package/dest/rpc/index.d.ts +2 -2
  43. package/dest/test/index.d.ts +1 -1
  44. package/dest/test/mock_archiver.d.ts +1 -1
  45. package/dest/test/mock_archiver.d.ts.map +1 -1
  46. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  47. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  48. package/dest/test/mock_l2_block_source.d.ts +7 -6
  49. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  50. package/dest/test/mock_l2_block_source.js +1 -1
  51. package/dest/test/mock_structs.d.ts +1 -1
  52. package/package.json +17 -17
  53. package/src/archiver/archiver.ts +380 -244
  54. package/src/archiver/archiver_store_test_suite.ts +5 -4
  55. package/src/archiver/config.ts +5 -0
  56. package/src/archiver/data_retrieval.ts +156 -125
  57. package/src/archiver/instrumentation.ts +2 -2
  58. package/src/archiver/structs/published.ts +2 -1
  59. package/src/archiver/validation.ts +52 -27
  60. package/src/index.ts +1 -1
  61. package/src/test/mock_l2_block_source.ts +7 -6
@@ -13,31 +13,31 @@ import { Fr } from '@aztec/foundation/fields';
13
13
  import { createLogger } from '@aztec/foundation/log';
14
14
  import { promiseWithResolvers } from '@aztec/foundation/promise';
15
15
  import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
16
- import { sleep } from '@aztec/foundation/sleep';
17
16
  import { count } from '@aztec/foundation/string';
18
- import { Timer, elapsed } from '@aztec/foundation/timer';
17
+ import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
19
18
  import { ContractClassPublishedEvent, PrivateFunctionBroadcastedEvent, UtilityFunctionBroadcastedEvent } from '@aztec/protocol-contracts/class-registry';
20
19
  import { ContractInstancePublishedEvent, ContractInstanceUpdatedEvent } from '@aztec/protocol-contracts/instance-registry';
21
- import { L2BlockSourceEvents } from '@aztec/stdlib/block';
20
+ import { L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
22
21
  import { computePublicBytecodeCommitment, isValidPrivateFunctionMembershipProof, isValidUtilityFunctionMembershipProof } from '@aztec/stdlib/contract';
23
22
  import { getEpochAtSlot, getEpochNumberAtTimestamp, getSlotAtTimestamp, getSlotRangeForEpoch, getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
24
- import { Attributes, getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
23
+ import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
25
24
  import { EventEmitter } from 'events';
26
25
  import groupBy from 'lodash.groupby';
27
26
  import { createPublicClient, fallback, http } from 'viem';
28
- import { retrieveBlocksFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedBlockToPublishedL2Block } from './data_retrieval.js';
27
+ import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from './data_retrieval.js';
29
28
  import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
30
29
  import { ArchiverInstrumentation } from './instrumentation.js';
31
- import { validateBlockAttestations } from './validation.js';
30
+ import { validateCheckpointAttestations } from './validation.js';
32
31
  function mapArchiverConfig(config) {
33
32
  return {
34
33
  pollingIntervalMs: config.archiverPollingIntervalMS,
35
34
  batchSize: config.archiverBatchSize,
36
- skipValidateBlockAttestations: config.skipValidateBlockAttestations
35
+ skipValidateBlockAttestations: config.skipValidateBlockAttestations,
36
+ maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds
37
37
  };
38
38
  }
39
39
  /**
40
- * Pulls L2 blocks in a non-blocking manner and provides interface for their retrieval.
40
+ * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
41
41
  * Responsible for handling robust L1 polling so that other components do not need to
42
42
  * concern themselves with it.
43
43
  */ export class Archiver extends EventEmitter {
@@ -47,10 +47,11 @@ function mapArchiverConfig(config) {
47
47
  config;
48
48
  blobSinkClient;
49
49
  epochCache;
50
+ dateProvider;
50
51
  instrumentation;
51
52
  l1constants;
52
53
  log;
53
- /** A loop in which we will be continually fetching new L2 blocks. */ runningPromise;
54
+ /** A loop in which we will be continually fetching new checkpoints. */ runningPromise;
54
55
  rollup;
55
56
  inbox;
56
57
  store;
@@ -68,13 +69,16 @@ function mapArchiverConfig(config) {
68
69
  * @param pollingIntervalMs - The interval for polling for L1 logs (in milliseconds).
69
70
  * @param store - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
70
71
  * @param log - A logger.
71
- */ constructor(publicClient, l1Addresses, dataStore, config, blobSinkClient, epochCache, instrumentation, l1constants, log = createLogger('archiver')){
72
- super(), this.publicClient = publicClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobSinkClient = blobSinkClient, this.epochCache = epochCache, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = false;
72
+ */ constructor(publicClient, l1Addresses, dataStore, config, blobSinkClient, epochCache, dateProvider, instrumentation, l1constants, log = createLogger('archiver')){
73
+ super(), this.publicClient = publicClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobSinkClient = blobSinkClient, this.epochCache = epochCache, this.dateProvider = dateProvider, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = false;
73
74
  this.tracer = instrumentation.tracer;
74
75
  this.store = new ArchiverStoreHelper(dataStore);
75
76
  this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
76
77
  this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
77
78
  this.initialSyncPromise = promiseWithResolvers();
79
+ // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
80
+ // are done as fast as possible. This then gets updated once the initial sync completes.
81
+ this.runningPromise = new RunningPromise(()=>this.sync(), this.log, this.config.pollingIntervalMs / 10, makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError));
78
82
  }
79
83
  /**
80
84
  * Creates a new instance of the Archiver and blocks until it syncs from chain.
@@ -113,11 +117,12 @@ function mapArchiverConfig(config) {
113
117
  };
114
118
  const opts = merge({
115
119
  pollingIntervalMs: 10_000,
116
- batchSize: 100
120
+ batchSize: 100,
121
+ maxAllowedEthClientDriftSeconds: 300
117
122
  }, mapArchiverConfig(config));
118
123
  const epochCache = deps.epochCache ?? await EpochCache.create(config.l1Contracts.rollupAddress, config, deps);
119
124
  const telemetry = deps.telemetry ?? getTelemetryClient();
120
- const archiver = new Archiver(publicClient, config.l1Contracts, archiverStore, opts, deps.blobSinkClient, epochCache, await ArchiverInstrumentation.new(telemetry, ()=>archiverStore.estimateSize()), l1Constants);
125
+ const archiver = new Archiver(publicClient, config.l1Contracts, archiverStore, opts, deps.blobSinkClient, epochCache, deps.dateProvider ?? new DateProvider(), await ArchiverInstrumentation.new(telemetry, ()=>archiverStore.estimateSize()), l1Constants);
121
126
  await archiver.start(blockUntilSynced);
122
127
  return archiver;
123
128
  }
@@ -128,48 +133,48 @@ function mapArchiverConfig(config) {
128
133
  * Starts sync process.
129
134
  * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
130
135
  */ async start(blockUntilSynced) {
131
- if (this.runningPromise) {
136
+ if (this.runningPromise.isRunning()) {
132
137
  throw new Error('Archiver is already running');
133
138
  }
134
139
  await this.blobSinkClient.testSources();
140
+ await this.testEthereumNodeSynced();
141
+ // Log initial state for the archiver
142
+ const { l1StartBlock } = this.l1constants;
143
+ const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
144
+ const currentL2Block = await this.getBlockNumber();
145
+ this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 block ${currentL2Block}`, {
146
+ blocksSynchedTo,
147
+ messagesSynchedTo,
148
+ currentL2Block
149
+ });
150
+ // Start sync loop, and return the wait for initial sync if we are asked to block until synced
151
+ this.runningPromise.start();
135
152
  if (blockUntilSynced) {
136
- while(!await this.syncSafe(true)){
137
- this.log.info(`Retrying initial archiver sync in ${this.config.pollingIntervalMs}ms`);
138
- await sleep(this.config.pollingIntervalMs);
139
- }
153
+ return this.waitForInitialSync();
140
154
  }
141
- this.runningPromise = new RunningPromise(()=>this.sync(false), this.log, this.config.pollingIntervalMs, makeLoggingErrorHandler(this.log, // Ignored errors will not log to the console
142
- // We ignore NoBlobBodiesFound as the message may not have been passed to the blob sink yet
143
- NoBlobBodiesFoundError));
144
- this.runningPromise.start();
145
155
  }
146
156
  syncImmediate() {
147
- if (!this.runningPromise) {
148
- throw new Error('Archiver is not running');
149
- }
150
157
  return this.runningPromise.trigger();
151
158
  }
152
159
  waitForInitialSync() {
153
160
  return this.initialSyncPromise.promise;
154
161
  }
155
- async syncSafe(initialRun) {
156
- try {
157
- await this.sync(initialRun);
158
- return true;
159
- } catch (error) {
160
- if (error instanceof NoBlobBodiesFoundError) {
161
- this.log.error(`Error syncing archiver: ${error.message}`);
162
- } else if (error instanceof BlockTagTooOldError) {
163
- this.log.warn(`Re-running archiver sync: ${error.message}`);
164
- } else {
165
- this.log.error('Error during archiver sync', error);
166
- }
167
- return false;
162
+ /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ async testEthereumNodeSynced() {
163
+ const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
164
+ if (maxAllowedDelay === 0) {
165
+ return;
166
+ }
167
+ const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({
168
+ includeTransactions: false
169
+ });
170
+ const currentTime = BigInt(this.dateProvider.nowInSeconds());
171
+ if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
172
+ throw new Error(`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`);
168
173
  }
169
174
  }
170
175
  /**
171
176
  * Fetches logs from L1 contracts and processes them.
172
- */ async sync(initialRun) {
177
+ */ async sync() {
173
178
  /**
174
179
  * We keep track of three "pointers" to L1 blocks:
175
180
  * 1. the last L1 block that published an L2 block
@@ -179,8 +184,6 @@ function mapArchiverConfig(config) {
179
184
  * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
180
185
  * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
181
186
  * The archiver will stay back, until there's data on L1 that will move the pointers forward.
182
- *
183
- * This code does not handle reorgs.
184
187
  */ const { l1StartBlock, l1StartBlockHash } = this.l1constants;
185
188
  const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = {
186
189
  l1BlockNumber: l1StartBlock,
@@ -191,12 +194,12 @@ function mapArchiverConfig(config) {
191
194
  });
192
195
  const currentL1BlockNumber = currentL1Block.number;
193
196
  const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
194
- if (initialRun) {
195
- this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo}` + ` to current L1 block ${currentL1BlockNumber} with hash ${currentL1BlockHash.toString()}`, {
196
- blocksSynchedTo,
197
- messagesSynchedTo
198
- });
199
- }
197
+ this.log.trace(`Starting new archiver sync iteration`, {
198
+ blocksSynchedTo,
199
+ messagesSynchedTo,
200
+ currentL1BlockNumber,
201
+ currentL1BlockHash
202
+ });
200
203
  // ********** Ensuring Consistency of data pulled from L1 **********
201
204
  /**
202
205
  * There are a number of calls in this sync operation to L1 for retrieving
@@ -219,23 +222,40 @@ function mapArchiverConfig(config) {
219
222
  const currentL1Timestamp = !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber ? (await this.publicClient.getBlock({
220
223
  blockNumber: currentL1BlockNumber
221
224
  })).timestamp : this.l1Timestamp;
222
- // ********** Events that are processed per L2 block **********
225
+ // Warn if the latest L1 block timestamp is too old
226
+ const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
227
+ const now = this.dateProvider.nowInSeconds();
228
+ if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
229
+ this.log.warn(`Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`, {
230
+ currentL1BlockNumber,
231
+ currentL1Timestamp,
232
+ now,
233
+ maxAllowedDelay
234
+ });
235
+ }
236
+ // ********** Events that are processed per checkpoint **********
223
237
  if (currentL1BlockNumber > blocksSynchedTo) {
224
- // First we retrieve new L2 blocks and store them in the DB. This will also update the
225
- // pending chain validation status, proven block number, and synched L1 block number.
226
- const rollupStatus = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber);
238
+ // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
239
+ // pending chain validation status, proven checkpoint number, and synched L1 block number.
240
+ const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
227
241
  // Then we prune the current epoch if it'd reorg on next submission.
228
- // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
229
- // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of
242
+ // Note that we don't do this before retrieving checkpoints because we may need to retrieve
243
+ // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
230
244
  // the chain locally before we start unwinding stuff. This can be optimized by figuring out
231
- // up to which point we're pruning, and then requesting L2 blocks up to that point only.
232
- const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenBlockNumber, currentL1BlockNumber, currentL1Timestamp);
233
- // And lastly we check if we are missing any L2 blocks behind us due to a possible L1 reorg.
245
+ // up to which point we're pruning, and then requesting checkpoints up to that point only.
246
+ const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp);
247
+ // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
248
+ // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
249
+ // we get a valid checkpoint to advance the syncpoint.
250
+ if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
251
+ await this.store.setBlockSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
252
+ }
253
+ // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
234
254
  // We only do this if rollup cant prune on the next submission. Otherwise we will end up
235
- // re-syncing the blocks we have just unwound above. We also dont do this if the last block is invalid,
255
+ // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
236
256
  // since the archiver will rightfully refuse to sync up to it.
237
257
  if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
238
- await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
258
+ await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
239
259
  }
240
260
  this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
241
261
  }
@@ -244,14 +264,17 @@ function mapArchiverConfig(config) {
244
264
  // but the corresponding blocks have not been processed (see #12631).
245
265
  this.l1Timestamp = currentL1Timestamp;
246
266
  this.l1BlockNumber = currentL1BlockNumber;
247
- this.initialSyncComplete = true;
248
- this.initialSyncPromise.resolve();
249
- if (initialRun) {
250
- this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete.`, {
267
+ // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
268
+ // so if the initial sync took too long, we still go for another iteration.
269
+ if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= await this.publicClient.getBlockNumber()) {
270
+ this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
251
271
  l1BlockNumber: currentL1BlockNumber,
252
272
  syncPoint: await this.store.getSynchPoint(),
253
273
  ...await this.getL2Tips()
254
274
  });
275
+ this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
276
+ this.initialSyncComplete = true;
277
+ this.initialSyncPromise.resolve();
255
278
  }
256
279
  }
257
280
  /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ async canPrune(currentL1BlockNumber, currentL1Timestamp) {
@@ -268,30 +291,30 @@ function mapArchiverConfig(config) {
268
291
  }
269
292
  return result;
270
293
  }
271
- /** Checks if there'd be a reorg for the next block submission and start pruning now. */ async handleEpochPrune(provenBlockNumber, currentL1BlockNumber, currentL1Timestamp) {
294
+ /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */ async handleEpochPrune(provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp) {
272
295
  const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
273
- const localPendingBlockNumber = await this.getBlockNumber();
274
- const canPrune = localPendingBlockNumber > provenBlockNumber && rollupCanPrune;
296
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
297
+ const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
275
298
  if (canPrune) {
276
299
  const timer = new Timer();
277
- const pruneFrom = provenBlockNumber + 1;
278
- const header = await this.getBlockHeader(Number(pruneFrom));
300
+ const pruneFrom = provenCheckpointNumber + 1;
301
+ const header = await this.getCheckpointHeader(Number(pruneFrom));
279
302
  if (header === undefined) {
280
- throw new Error(`Missing block header ${pruneFrom}`);
303
+ throw new Error(`Missing checkpoint header ${pruneFrom}`);
281
304
  }
282
- const pruneFromSlotNumber = header.globalVariables.slotNumber.toBigInt();
305
+ const pruneFromSlotNumber = header.slotNumber;
283
306
  const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
284
- const blocksToUnwind = localPendingBlockNumber - provenBlockNumber;
285
- const blocks = await this.getBlocks(Number(provenBlockNumber) + 1, Number(blocksToUnwind));
307
+ const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
308
+ const checkpoints = await this.getCheckpoints(Number(provenCheckpointNumber) + 1, Number(checkpointsToUnwind));
286
309
  // Emit an event for listening services to react to the chain prune
287
310
  this.emit(L2BlockSourceEvents.L2PruneDetected, {
288
311
  type: L2BlockSourceEvents.L2PruneDetected,
289
312
  epochNumber: pruneFromEpochNumber,
290
- blocks
313
+ blocks: checkpoints.flatMap((c)=>L2Block.fromCheckpoint(c))
291
314
  });
292
- this.log.debug(`L2 prune from ${provenBlockNumber + 1} to ${localPendingBlockNumber} will occur on next block submission.`);
293
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
294
- this.log.warn(`Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest block is ${await this.getBlockNumber()}.`);
315
+ this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
316
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
317
+ this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
295
318
  this.instrumentation.processPrune(timer.ms());
296
319
  // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
297
320
  // Seems like the next iteration should handle this.
@@ -334,7 +357,7 @@ function mapArchiverConfig(config) {
334
357
  });
335
358
  // Compare message count and rolling hash. If they match, no need to retrieve anything.
336
359
  if (remoteMessagesState.totalMessagesInserted === localMessagesInserted && remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)) {
337
- this.log.debug(`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`);
360
+ this.log.trace(`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`);
338
361
  return;
339
362
  }
340
363
  // Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
@@ -460,162 +483,173 @@ function mapArchiverConfig(config) {
460
483
  }
461
484
  return Buffer32.fromString(block.hash);
462
485
  }
463
- async handleL2blocks(blocksSynchedTo, currentL1BlockNumber) {
464
- const localPendingBlockNumber = await this.getBlockNumber();
486
+ async handleCheckpoints(blocksSynchedTo, currentL1BlockNumber) {
487
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
465
488
  const initialValidationResult = await this.store.getPendingChainValidationStatus();
466
- const [provenBlockNumber, provenArchive, pendingBlockNumber, pendingArchive, archiveForLocalPendingBlockNumber] = await this.rollup.status(BigInt(localPendingBlockNumber), {
489
+ const [rollupProvenCheckpointNumber, provenArchive, rollupPendingCheckpointNumber, pendingArchive, archiveForLocalPendingCheckpointNumber] = await this.rollup.status(BigInt(localPendingCheckpointNumber), {
467
490
  blockNumber: currentL1BlockNumber
468
491
  });
492
+ const provenCheckpointNumber = Number(rollupProvenCheckpointNumber);
493
+ const pendingCheckpointNumber = Number(rollupPendingCheckpointNumber);
469
494
  const rollupStatus = {
470
- provenBlockNumber: Number(provenBlockNumber),
495
+ provenCheckpointNumber,
471
496
  provenArchive,
472
- pendingBlockNumber: Number(pendingBlockNumber),
497
+ pendingCheckpointNumber,
473
498
  pendingArchive,
474
499
  validationResult: initialValidationResult
475
500
  };
476
501
  this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
477
- localPendingBlockNumber,
502
+ localPendingCheckpointNumber,
478
503
  blocksSynchedTo,
479
504
  currentL1BlockNumber,
480
- archiveForLocalPendingBlockNumber,
505
+ archiveForLocalPendingCheckpointNumber,
481
506
  ...rollupStatus
482
507
  });
483
- const updateProvenBlock = async ()=>{
484
- // Annoying edge case: if proven block is moved back to 0 due to a reorg at the beginning of the chain,
485
- // we need to set it to zero. This is an edge case because we dont have a block zero (initial block is one),
486
- // so localBlockForDestinationProvenBlockNumber would not be found below.
487
- if (provenBlockNumber === 0n) {
488
- const localProvenBlockNumber = await this.store.getProvenL2BlockNumber();
489
- if (localProvenBlockNumber !== Number(provenBlockNumber)) {
490
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
491
- this.log.info(`Rolled back proven chain to block ${provenBlockNumber}`, {
492
- provenBlockNumber
508
+ const updateProvenCheckpoint = async ()=>{
509
+ // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
510
+ // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
511
+ // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
512
+ if (provenCheckpointNumber === 0) {
513
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
514
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
515
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
516
+ this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, {
517
+ provenCheckpointNumber
493
518
  });
494
519
  }
495
520
  }
496
- const localBlockForDestinationProvenBlockNumber = await this.getBlock(Number(provenBlockNumber));
497
- // Sanity check. I've hit what seems to be a state where the proven block is set to a value greater than the latest
498
- // synched block when requesting L2Tips from the archiver. This is the only place where the proven block is set.
499
- const synched = await this.store.getSynchedL2BlockNumber();
500
- if (localBlockForDestinationProvenBlockNumber && synched < localBlockForDestinationProvenBlockNumber?.number) {
501
- this.log.error(`Hit local block greater than last synched block: ${localBlockForDestinationProvenBlockNumber.number} > ${synched}`);
521
+ const localCheckpointForDestinationProvenCheckpointNumber = await this.getCheckpoint(provenCheckpointNumber);
522
+ // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
523
+ // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
524
+ const synched = await this.getSynchedCheckpointNumber();
525
+ if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.number) {
526
+ this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.number} > ${synched}`);
502
527
  }
503
- this.log.trace(`Local block for remote proven block ${provenBlockNumber} is ${localBlockForDestinationProvenBlockNumber?.archive.root.toString() ?? 'undefined'}`);
504
- if (localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString()) {
505
- const localProvenBlockNumber = await this.store.getProvenL2BlockNumber();
506
- if (localProvenBlockNumber !== Number(provenBlockNumber)) {
507
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
508
- this.log.info(`Updated proven chain to block ${provenBlockNumber}`, {
509
- provenBlockNumber
528
+ this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
529
+ const lastProvenBlockNumber = await this.getLastBlockNumberInCheckpoint(provenCheckpointNumber);
530
+ if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive === localCheckpointForDestinationProvenCheckpointNumber.archive.root.toString()) {
531
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
532
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
533
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
534
+ this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
535
+ provenCheckpointNumber
510
536
  });
511
- const provenSlotNumber = localBlockForDestinationProvenBlockNumber.header.globalVariables.slotNumber.toBigInt();
537
+ const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
512
538
  const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
513
539
  this.emit(L2BlockSourceEvents.L2BlockProven, {
514
540
  type: L2BlockSourceEvents.L2BlockProven,
515
- blockNumber: provenBlockNumber,
541
+ blockNumber: BigInt(lastProvenBlockNumber),
516
542
  slotNumber: provenSlotNumber,
517
543
  epochNumber: provenEpochNumber
518
544
  });
519
545
  } else {
520
- this.log.trace(`Proven block ${provenBlockNumber} already stored.`);
546
+ this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
521
547
  }
522
548
  }
523
- this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
549
+ this.instrumentation.updateLastProvenBlock(lastProvenBlockNumber);
524
550
  };
525
- // This is an edge case that we only hit if there are no proposed blocks.
526
- // If we have 0 blocks locally and there are no blocks onchain there is nothing to do.
527
- const noBlocks = localPendingBlockNumber === 0 && pendingBlockNumber === 0n;
528
- if (noBlocks) {
551
+ // This is an edge case that we only hit if there are no proposed checkpoints.
552
+ // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
553
+ const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
554
+ if (noCheckpoints) {
529
555
  await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
530
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no blocks on chain`);
556
+ this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
531
557
  return rollupStatus;
532
558
  }
533
- await updateProvenBlock();
559
+ await updateProvenCheckpoint();
534
560
  // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
535
- // are any state that could be impacted by it. If we have no blocks, there is no impact.
536
- if (localPendingBlockNumber > 0) {
537
- const localPendingBlock = await this.getBlock(localPendingBlockNumber);
538
- if (localPendingBlock === undefined) {
539
- throw new Error(`Missing block ${localPendingBlockNumber}`);
561
+ // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
562
+ if (localPendingCheckpointNumber > 0) {
563
+ const localPendingCheckpoint = await this.getCheckpoint(localPendingCheckpointNumber);
564
+ if (localPendingCheckpoint === undefined) {
565
+ throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
540
566
  }
541
- const localPendingArchiveRoot = localPendingBlock.archive.root.toString();
542
- const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingArchiveRoot;
543
- if (noBlockSinceLast) {
567
+ const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
568
+ const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive === localPendingArchiveRoot;
569
+ if (noCheckpointSinceLast) {
544
570
  // We believe the following line causes a problem when we encounter L1 re-orgs.
545
571
  // Basically, by setting the synched L1 block number here, we are saying that we have
546
- // processed all blocks up to the current L1 block number and we will not attempt to retrieve logs from
572
+ // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
547
573
  // this block again (or any blocks before).
548
- // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing blocks
574
+ // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
549
575
  // We must only set this block number based on actually retrieved logs.
550
576
  // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
551
577
  // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
552
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
578
+ this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
553
579
  return rollupStatus;
554
580
  }
555
- const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingArchiveRoot;
556
- if (!localPendingBlockInChain) {
557
- // If our local pending block tip is not in the chain on L1 a "prune" must have happened
581
+ const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber === localPendingArchiveRoot;
582
+ if (!localPendingCheckpointInChain) {
583
+ // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
558
584
  // or the L1 have reorged.
559
585
  // In any case, we have to figure out how far into the past the action will take us.
560
- // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1.
561
- this.log.debug(`L2 prune has been detected due to local pending block ${localPendingBlockNumber} not in chain`, {
562
- localPendingBlockNumber,
586
+ // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
587
+ this.log.debug(`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`, {
588
+ localPendingCheckpointNumber,
563
589
  localPendingArchiveRoot,
564
- archiveForLocalPendingBlockNumber
590
+ archiveForLocalPendingCheckpointNumber
565
591
  });
566
- let tipAfterUnwind = localPendingBlockNumber;
592
+ let tipAfterUnwind = localPendingCheckpointNumber;
567
593
  while(true){
568
- const candidateBlock = await this.getBlock(Number(tipAfterUnwind));
569
- if (candidateBlock === undefined) {
594
+ const candidateCheckpoint = await this.getCheckpoint(tipAfterUnwind);
595
+ if (candidateCheckpoint === undefined) {
570
596
  break;
571
597
  }
572
- const archiveAtContract = await this.rollup.archiveAt(BigInt(candidateBlock.number));
573
- if (archiveAtContract === candidateBlock.archive.root.toString()) {
598
+ const archiveAtContract = await this.rollup.archiveAt(BigInt(candidateCheckpoint.number));
599
+ this.log.trace(`Checking local checkpoint ${candidateCheckpoint.number} with archive ${candidateCheckpoint.archive.root}`, {
600
+ archiveAtContract,
601
+ archiveLocal: candidateCheckpoint.archive.root.toString()
602
+ });
603
+ if (archiveAtContract === candidateCheckpoint.archive.root.toString()) {
574
604
  break;
575
605
  }
576
606
  tipAfterUnwind--;
577
607
  }
578
- const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind;
579
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
580
- this.log.warn(`Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest block is ${await this.getBlockNumber()}.`);
608
+ const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
609
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
610
+ this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
581
611
  }
582
612
  }
583
- // Retrieve L2 blocks in batches. Each batch is estimated to accommodate up to L2 'blockBatchSize' blocks,
613
+ // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
584
614
  // computed using the L2 block time vs the L1 block time.
585
615
  let searchStartBlock = blocksSynchedTo;
586
616
  let searchEndBlock = blocksSynchedTo;
587
- let lastRetrievedBlock;
617
+ let lastRetrievedCheckpoint;
618
+ let lastL1BlockWithCheckpoint = undefined;
588
619
  do {
589
620
  [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
590
- this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
621
+ this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
591
622
  // TODO(md): Retrieve from blob sink then from consensus client, then from peers
592
- const retrievedBlocks = await retrieveBlocksFromRollup(this.rollup.getContract(), this.publicClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.log);
593
- if (retrievedBlocks.length === 0) {
623
+ const retrievedCheckpoints = await retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.log);
624
+ if (retrievedCheckpoints.length === 0) {
594
625
  // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
595
626
  // See further details in earlier comments.
596
- this.log.trace(`Retrieved no new L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
627
+ this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
597
628
  continue;
598
629
  }
599
- const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
600
- this.log.debug(`Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${searchStartBlock} and ${searchEndBlock} with last processed L1 block ${lastProcessedL1BlockNumber}.`);
601
- const publishedBlocks = await Promise.all(retrievedBlocks.map((b)=>retrievedBlockToPublishedL2Block(b)));
602
- const validBlocks = [];
603
- for (const block of publishedBlocks){
630
+ this.log.debug(`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
631
+ lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
632
+ searchStartBlock,
633
+ searchEndBlock
634
+ });
635
+ const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map((b)=>retrievedToPublishedCheckpoint(b)));
636
+ const validCheckpoints = [];
637
+ for (const published of publishedCheckpoints){
604
638
  const validationResult = this.config.skipValidateBlockAttestations ? {
605
639
  valid: true
606
- } : await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log);
607
- // Only update the validation result if it has changed, so we can keep track of the first invalid block
608
- // in case there is a sequence of more than one invalid block, as we need to invalidate the first one.
609
- // There is an exception though: if an invalid block is invalidated and replaced with another invalid block,
640
+ } : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
641
+ // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
642
+ // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
643
+ // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
610
644
  // we need to update the validation result, since we need to be able to invalidate the new one.
611
- // See test 'chain progresses if an invalid block is invalidated with an invalid one' for more info.
645
+ // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
612
646
  if (rollupStatus.validationResult?.valid !== validationResult.valid || !rollupStatus.validationResult.valid && !validationResult.valid && rollupStatus.validationResult.block.blockNumber === validationResult.block.blockNumber) {
613
647
  rollupStatus.validationResult = validationResult;
614
648
  }
615
649
  if (!validationResult.valid) {
616
- this.log.warn(`Skipping block ${block.block.number} due to invalid attestations`, {
617
- blockHash: block.block.hash(),
618
- l1BlockNumber: block.l1.blockNumber,
650
+ this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
651
+ checkpointHash: published.checkpoint.hash(),
652
+ l1BlockNumber: published.l1.blockNumber,
619
653
  ...pick(validationResult, 'reason')
620
654
  });
621
655
  // Emit event for invalid block detection
@@ -625,18 +659,18 @@ function mapArchiverConfig(config) {
625
659
  });
626
660
  continue;
627
661
  }
628
- validBlocks.push(block);
629
- this.log.debug(`Ingesting new L2 block ${block.block.number} with ${block.block.body.txEffects.length} txs`, {
630
- blockHash: block.block.hash(),
631
- l1BlockNumber: block.l1.blockNumber,
632
- ...block.block.header.globalVariables.toInspect(),
633
- ...block.block.getStats()
662
+ validCheckpoints.push(published);
663
+ this.log.debug(`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`, {
664
+ checkpointHash: published.checkpoint.hash(),
665
+ l1BlockNumber: published.l1.blockNumber,
666
+ ...published.checkpoint.header.toInspect(),
667
+ blocks: published.checkpoint.blocks.map((b)=>b.getStats())
634
668
  });
635
669
  }
636
670
  try {
637
671
  const updatedValidationResult = rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
638
- const [processDuration] = await elapsed(()=>this.store.addBlocks(validBlocks, updatedValidationResult));
639
- this.instrumentation.processNewBlocks(processDuration / validBlocks.length, validBlocks.map((b)=>b.block));
672
+ const [processDuration] = await elapsed(()=>this.addCheckpoints(validCheckpoints, updatedValidationResult));
673
+ this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
640
674
  } catch (err) {
641
675
  if (err instanceof InitialBlockNumberNotSequentialError) {
642
676
  const { previousBlockNumber, newBlockNumber } = err;
@@ -652,58 +686,58 @@ function mapArchiverConfig(config) {
652
686
  }
653
687
  throw err;
654
688
  }
655
- for (const block of validBlocks){
656
- this.log.info(`Downloaded L2 block ${block.block.number}`, {
657
- blockHash: await block.block.hash(),
658
- blockNumber: block.block.number,
659
- txCount: block.block.body.txEffects.length,
660
- globalVariables: block.block.header.globalVariables.toInspect(),
661
- archiveRoot: block.block.archive.root.toString(),
662
- archiveNextLeafIndex: block.block.archive.nextAvailableLeafIndex
689
+ for (const checkpoint of validCheckpoints){
690
+ this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
691
+ checkpointHash: checkpoint.checkpoint.hash(),
692
+ checkpointNumber: checkpoint.checkpoint.number,
693
+ blockCount: checkpoint.checkpoint.blocks.length,
694
+ txCount: checkpoint.checkpoint.blocks.reduce((acc, b)=>acc + b.body.txEffects.length, 0),
695
+ header: checkpoint.checkpoint.header.toInspect(),
696
+ archiveRoot: checkpoint.checkpoint.archive.root.toString(),
697
+ archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex
663
698
  });
664
699
  }
665
- lastRetrievedBlock = validBlocks.at(-1) ?? lastRetrievedBlock;
700
+ lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
701
+ lastL1BlockWithCheckpoint = publishedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
666
702
  }while (searchEndBlock < currentL1BlockNumber)
667
703
  // Important that we update AFTER inserting the blocks.
668
- await updateProvenBlock();
704
+ await updateProvenCheckpoint();
669
705
  return {
670
706
  ...rollupStatus,
671
- lastRetrievedBlock
707
+ lastRetrievedCheckpoint,
708
+ lastL1BlockWithCheckpoint
672
709
  };
673
710
  }
674
- async checkForNewBlocksBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
675
- const { lastRetrievedBlock, pendingBlockNumber } = status;
676
- // Compare the last L2 block we have (either retrieved in this round or loaded from store) with what the
711
+ async checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
712
+ const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
713
+ // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
677
714
  // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
678
- const latestLocalL2BlockNumber = lastRetrievedBlock?.block.number ?? await this.store.getSynchedL2BlockNumber();
679
- if (latestLocalL2BlockNumber < pendingBlockNumber) {
715
+ const latestLocalCheckpointNumber = lastRetrievedCheckpoint?.checkpoint.number ?? await this.getSynchedCheckpointNumber();
716
+ if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
680
717
  // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
681
- // but still havent reached the pending block according to the call to the rollup contract.
682
- // We suspect an L1 reorg that added blocks *behind* us. If that is the case, it must have happened between the
683
- // last L2 block we saw and the current one, so we reset the last synched L1 block number. In the edge case we
684
- // don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
685
- const latestLocalL2Block = lastRetrievedBlock ?? (latestLocalL2BlockNumber > 0 ? await this.store.getPublishedBlocks(latestLocalL2BlockNumber, 1).then(([b])=>b) : undefined);
686
- const targetL1BlockNumber = latestLocalL2Block?.l1.blockNumber ?? maxBigint(currentL1BlockNumber - 64n, 0n);
687
- const latestLocalL2BlockArchive = latestLocalL2Block?.block.archive.root.toString();
688
- this.log.warn(`Failed to reach L2 block ${pendingBlockNumber} at ${currentL1BlockNumber} (latest is ${latestLocalL2BlockNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
689
- latestLocalL2BlockNumber,
690
- latestLocalL2BlockArchive,
718
+ // but still haven't reached the pending checkpoint according to the call to the rollup contract.
719
+ // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
720
+ // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
721
+ // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
722
+ const latestLocalCheckpoint = lastRetrievedCheckpoint ?? (latestLocalCheckpointNumber > 0 ? await this.getPublishedCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c) : undefined);
723
+ const targetL1BlockNumber = latestLocalCheckpoint?.l1.blockNumber ?? maxBigint(currentL1BlockNumber - 64n, 0n);
724
+ const latestLocalCheckpointArchive = latestLocalCheckpoint?.checkpoint.archive.root.toString();
725
+ this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
726
+ latestLocalCheckpointNumber,
727
+ latestLocalCheckpointArchive,
691
728
  blocksSynchedTo,
692
729
  currentL1BlockNumber,
693
730
  ...status
694
731
  });
695
732
  await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
696
733
  } else {
697
- this.log.trace(`No new blocks behind L1 sync point to retrieve.`, {
698
- latestLocalL2BlockNumber,
699
- pendingBlockNumber
734
+ this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
735
+ latestLocalCheckpointNumber,
736
+ pendingCheckpointNumber
700
737
  });
701
738
  }
702
739
  }
703
740
  /** Resumes the archiver after a stop. */ resume() {
704
- if (!this.runningPromise) {
705
- throw new Error(`Archiver was never started`);
706
- }
707
741
  if (this.runningPromise.isRunning()) {
708
742
  this.log.warn(`Archiver already running`);
709
743
  }
@@ -715,7 +749,7 @@ function mapArchiverConfig(config) {
715
749
  * @returns A promise signalling completion of the stop process.
716
750
  */ async stop() {
717
751
  this.log.debug('Stopping...');
718
- await this.runningPromise?.stop();
752
+ await this.runningPromise.stop();
719
753
  this.log.info('Stopped.');
720
754
  return Promise.resolve();
721
755
  }
@@ -754,7 +788,7 @@ function mapArchiverConfig(config) {
754
788
  // Walk the list of blocks backwards and filter by slots matching the requested epoch.
755
789
  // We'll typically ask for blocks for a very recent epoch, so we shouldn't need an index here.
756
790
  let block = await this.getBlock(await this.store.getSynchedL2BlockNumber());
757
- const slot = (b)=>b.header.globalVariables.slotNumber.toBigInt();
791
+ const slot = (b)=>b.header.globalVariables.slotNumber;
758
792
  while(block && slot(block) >= start){
759
793
  if (slot(block) <= end) {
760
794
  blocks.push(block);
@@ -770,7 +804,7 @@ function mapArchiverConfig(config) {
770
804
  // We'll typically ask for blocks for a very recent epoch, so we shouldn't need an index here.
771
805
  let number = await this.store.getSynchedL2BlockNumber();
772
806
  let header = await this.getBlockHeader(number);
773
- const slot = (b)=>b.globalVariables.slotNumber.toBigInt();
807
+ const slot = (b)=>b.globalVariables.slotNumber;
774
808
  while(header && slot(header) >= start){
775
809
  if (slot(header) <= end) {
776
810
  blocks.push(header);
@@ -782,7 +816,7 @@ function mapArchiverConfig(config) {
782
816
  async isEpochComplete(epochNumber) {
783
817
  // The epoch is complete if the current L2 block is the last one in the epoch (or later)
784
818
  const header = await this.getBlockHeader('latest');
785
- const slot = header?.globalVariables.slotNumber.toBigInt();
819
+ const slot = header ? header.globalVariables.slotNumber : undefined;
786
820
  const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
787
821
  if (slot && slot >= endSlot) {
788
822
  return true;
@@ -806,6 +840,60 @@ function mapArchiverConfig(config) {
806
840
  /** Returns whether the archiver has completed an initial sync run successfully. */ isInitialSyncComplete() {
807
841
  return this.initialSyncComplete;
808
842
  }
843
+ async getPublishedCheckpoints(from, limit, proven) {
844
+ const blocks = await this.getPublishedBlocks(from, limit, proven);
845
+ return blocks.map((b)=>b.toPublishedCheckpoint());
846
+ }
847
+ async getCheckpoints(from, limit, proven) {
848
+ const published = await this.getPublishedCheckpoints(from, limit, proven);
849
+ return published.map((p)=>p.checkpoint);
850
+ }
851
+ async getCheckpoint(number) {
852
+ if (number < 0) {
853
+ number = await this.getSynchedCheckpointNumber();
854
+ }
855
+ if (number === 0) {
856
+ return undefined;
857
+ }
858
+ const published = await this.getPublishedCheckpoints(number, 1);
859
+ return published[0]?.checkpoint;
860
+ }
861
+ async getCheckpointHeader(number) {
862
+ if (number === 'latest') {
863
+ number = await this.getSynchedCheckpointNumber();
864
+ }
865
+ if (number === 0) {
866
+ return undefined;
867
+ }
868
+ const checkpoint = await this.getCheckpoint(number);
869
+ return checkpoint?.header;
870
+ }
871
/** Returns the latest checkpoint number known to this archiver; pure alias of getSynchedCheckpointNumber. */
getCheckpointNumber() {
    return this.getSynchedCheckpointNumber();
}
874
+ getSynchedCheckpointNumber() {
875
+ // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
876
+ return this.store.getSynchedL2BlockNumber();
877
+ }
878
+ getProvenCheckpointNumber() {
879
+ // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
880
+ return this.store.getProvenL2BlockNumber();
881
+ }
882
+ setProvenCheckpointNumber(checkpointNumber) {
883
+ // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
884
+ return this.store.setProvenL2BlockNumber(checkpointNumber);
885
+ }
886
+ unwindCheckpoints(from, checkpointsToUnwind) {
887
+ // TODO: This only works if we have one block per checkpoint.
888
+ return this.store.unwindBlocks(from, checkpointsToUnwind);
889
+ }
890
+ getLastBlockNumberInCheckpoint(checkpointNumber) {
891
+ // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
892
+ return Promise.resolve(checkpointNumber);
893
+ }
894
+ addCheckpoints(checkpoints, pendingChainValidationStatus) {
895
+ return this.store.addBlocks(checkpoints.map((p)=>PublishedL2Block.fromPublishedCheckpoint(p)), pendingChainValidationStatus);
896
+ }
809
897
  /**
810
898
  * Gets up to `limit` amount of L2 blocks starting from `from`.
811
899
  * @param from - Number of the first block to return (inclusive).
@@ -1030,9 +1118,7 @@ function mapArchiverConfig(config) {
1030
1118
  }
1031
1119
  }
1032
1120
  _ts_decorate([
1033
- trackSpan('Archiver.sync', (initialRun)=>({
1034
- [Attributes.INITIAL_SYNC]: initialRun
1035
- }))
1121
+ trackSpan('Archiver.sync')
1036
1122
  ], Archiver.prototype, "sync", null);
1037
1123
  var Operation = /*#__PURE__*/ function(Operation) {
1038
1124
  Operation[Operation["Store"] = 0] = "Store";