@aztec/archiver 3.0.0-nightly.20251216 → 3.0.0-nightly.20251218
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +60 -36
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +366 -180
- package/dest/archiver/archiver_store.d.ts +79 -23
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +1624 -251
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +49 -17
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +320 -83
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +29 -27
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +50 -26
- package/dest/archiver/kv_archiver_store/log_store.d.ts +4 -4
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/l1/data_retrieval.d.ts +11 -8
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/l1/data_retrieval.js +25 -17
- package/dest/archiver/structs/published.d.ts +1 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.d.ts +3 -2
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +8 -15
- package/package.json +13 -13
- package/src/archiver/archiver.ts +464 -222
- package/src/archiver/archiver_store.ts +88 -22
- package/src/archiver/archiver_store_test_suite.ts +1626 -232
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/kv_archiver_store/block_store.ts +435 -94
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +62 -38
- package/src/archiver/kv_archiver_store/log_store.ts +4 -4
- package/src/archiver/l1/data_retrieval.ts +27 -13
- package/src/archiver/structs/published.ts +0 -1
- package/src/test/mock_l2_block_source.ts +9 -16
@@ -18,9 +18,11 @@ import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
 import { count } from '@aztec/foundation/string';
 import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
+import { isDefined } from '@aztec/foundation/types';
 import { ContractClassPublishedEvent, PrivateFunctionBroadcastedEvent, UtilityFunctionBroadcastedEvent } from '@aztec/protocol-contracts/class-registry';
 import { ContractInstancePublishedEvent, ContractInstanceUpdatedEvent } from '@aztec/protocol-contracts/instance-registry';
-import { L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
+import { CommitteeAttestation, L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
+import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import { computePublicBytecodeCommitment, isValidPrivateFunctionMembershipProof, isValidUtilityFunctionMembershipProof } from '@aztec/stdlib/contract';
 import { getEpochAtSlot, getEpochNumberAtTimestamp, getSlotAtTimestamp, getSlotRangeForEpoch, getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
 import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
@@ -28,7 +30,7 @@ import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
 import { EventEmitter } from 'events';
 import groupBy from 'lodash.groupby';
 import { createPublicClient, fallback, http } from 'viem';
-import {
+import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
 import { ArchiverInstrumentation } from './instrumentation.js';
 import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from './l1/data_retrieval.js';
 import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
@@ -66,6 +68,7 @@ function mapArchiverConfig(config) {
 l1Timestamp;
 initialSyncComplete;
 initialSyncPromise;
+/** Queue of blocks to be added to the store, processed by the sync loop. */ blockQueue;
 tracer;
 /**
 * Creates a new instance of the Archiver.
@@ -78,7 +81,7 @@ function mapArchiverConfig(config) {
 * @param store - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
 * @param log - A logger.
 */ constructor(publicClient, debugClient, l1Addresses, dataStore, config, blobSinkClient, epochCache, dateProvider, instrumentation, l1constants, log = createLogger('archiver')){
-super(), this.publicClient = publicClient, this.debugClient = debugClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobSinkClient = blobSinkClient, this.epochCache = epochCache, this.dateProvider = dateProvider, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = false;
+super(), this.publicClient = publicClient, this.debugClient = debugClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobSinkClient = blobSinkClient, this.epochCache = epochCache, this.dateProvider = dateProvider, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = false, this.blockQueue = [];
 this.tracer = instrumentation.tracer;
 this.store = new ArchiverStoreHelper(dataStore);
 this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
@@ -162,11 +165,11 @@ function mapArchiverConfig(config) {
 // Log initial state for the archiver
 const { l1StartBlock } = this.l1constants;
 const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
-const
-this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2
+const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
+this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`, {
 blocksSynchedTo,
 messagesSynchedTo,
-
+currentL2Checkpoint
 });
 // Start sync loop, and return the wait for initial sync if we are asked to block until synced
 this.runningPromise.start();
@@ -177,6 +180,51 @@ function mapArchiverConfig(config) {
 syncImmediate() {
 return this.runningPromise.trigger();
 }
+/**
+* Queues a block to be added to the archiver store and triggers processing.
+* The block will be processed by the sync loop.
+* Implements the L2BlockSink interface.
+* @param block - The L2 block to add.
+* @returns A promise that resolves when the block has been added to the store, or rejects on error.
+*/ addBlock(block) {
+return new Promise((resolve, reject)=>{
+this.blockQueue.push({
+block,
+resolve,
+reject
+});
+this.log.debug(`Queued block ${block.number} for processing`);
+// Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
+this.syncImmediate().catch((err)=>{
+this.log.error(`Sync immediate call failed: ${err}`);
+});
+});
+}
+/**
+* Processes all queued blocks, adding them to the store.
+* Called at the beginning of each sync iteration.
+* Blocks are processed in the order they were queued.
+*/ async processQueuedBlocks() {
+if (this.blockQueue.length === 0) {
+return;
+}
+// Take all blocks from the queue
+const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
+this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
+// Process each block individually to properly resolve/reject each promise
+for (const { block, resolve, reject } of queuedItems){
+try {
+await this.store.addBlocks([
+block
+]);
+this.log.debug(`Added block ${block.number} to store`);
+resolve();
+} catch (err) {
+this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
+reject(err);
+}
+}
+}
 waitForInitialSync() {
 return this.initialSyncPromise.promise;
 }
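
The hunk above introduces a producer/consumer queue: addBlock enqueues a block together with its promise resolvers, and the sync loop drains the queue once per iteration, resolving or rejecting each caller individually. The following is an illustrative TypeScript sketch of that pattern only; Block and Store are simplified stand-in types, not the package's own L2Block and archiver store classes.

// Minimal sketch of the queue-with-resolvers pattern used by addBlock/processQueuedBlocks.
type Block = { number: number };
interface Store {
  addBlocks(blocks: Block[]): Promise<void>;
}

interface QueuedBlock {
  block: Block;
  resolve: () => void;
  reject: (reason?: unknown) => void;
}

class BlockQueue {
  private queue: QueuedBlock[] = [];
  constructor(private readonly store: Store) {}

  // Producer side: the returned promise settles only once the sync loop has
  // actually written the block to the store (or failed to).
  addBlock(block: Block): Promise<void> {
    return new Promise<void>((resolve, reject) => {
      this.queue.push({ block, resolve: () => resolve(), reject });
    });
  }

  // Consumer side: drained once per sync iteration, in FIFO order.
  async processQueuedBlocks(): Promise<void> {
    const items = this.queue.splice(0, this.queue.length);
    for (const { block, resolve, reject } of items) {
      try {
        await this.store.addBlocks([block]);
        resolve();
      } catch (err) {
        reject(err);
      }
    }
  }
}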
@@ -193,9 +241,7 @@ function mapArchiverConfig(config) {
 throw new Error(`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`);
 }
 }
-
-* Fetches logs from L1 contracts and processes them.
-*/ async sync() {
+async syncFromL1() {
 /**
 * We keep track of three "pointers" to L1 blocks:
 * 1. the last L1 block that published an L2 block
@@ -269,7 +315,7 @@ function mapArchiverConfig(config) {
 // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
 // we get a valid checkpoint to advance the syncpoint.
 if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
-await this.store.
+await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
 }
 // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
 // We only do this if rollup cant prune on the next submission. Otherwise we will end up
@@ -298,6 +344,14 @@ function mapArchiverConfig(config) {
 this.initialSyncPromise.resolve();
 }
 }
+/**
+* Fetches logs from L1 contracts and processes them.
+*/ async sync() {
+// Process any queued blocks first, before doing L1 sync
+await this.processQueuedBlocks();
+// Now perform L1 sync
+await this.syncFromL1();
+}
 /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ async canPrune(currentL1BlockNumber, currentL1Timestamp) {
 const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
 const result = await this.rollup.canPruneAtTime(time, {
@@ -326,12 +380,19 @@ function mapArchiverConfig(config) {
 const pruneFromSlotNumber = header.slotNumber;
 const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
 const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
-const
+const checkpointPromises = Array.from({
+length: checkpointsToUnwind
+}).fill(0).map((_, i)=>this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
+const checkpoints = await Promise.all(checkpointPromises);
+const blockPromises = await Promise.all(checkpoints.filter(isDefined).map((cp)=>this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))));
+const newBlocks = blockPromises.filter(isDefined).flat();
+// TODO(pw/mbps): Don't convert to legacy blocks here
+const blocks = (await Promise.all(newBlocks.map((x)=>this.getBlock(x.number)))).filter(isDefined);
 // Emit an event for listening services to react to the chain prune
 this.emit(L2BlockSourceEvents.L2PruneDetected, {
 type: L2BlockSourceEvents.L2PruneDetected,
 epochNumber: pruneFromEpochNumber,
-blocks
+blocks
 });
 this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
 await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
@@ -539,15 +600,14 @@ function mapArchiverConfig(config) {
 });
 }
 }
-const localCheckpointForDestinationProvenCheckpointNumber = await this.
+const localCheckpointForDestinationProvenCheckpointNumber = await this.store.getCheckpointData(provenCheckpointNumber);
 // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
 // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
 const synched = await this.getSynchedCheckpointNumber();
-if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.
-this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.
+if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber) {
+this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`);
 }
 this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
-const lastProvenBlockNumber = await this.getLastBlockNumberInCheckpoint(provenCheckpointNumber);
 if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive === localCheckpointForDestinationProvenCheckpointNumber.archive.root.toString()) {
 const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
 if (localProvenCheckpointNumber !== provenCheckpointNumber) {
@@ -557,23 +617,24 @@ function mapArchiverConfig(config) {
 });
 const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
 const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
+const lastBlockNumberInCheckpoint = localCheckpointForDestinationProvenCheckpointNumber.startBlock + localCheckpointForDestinationProvenCheckpointNumber.numBlocks - 1;
 this.emit(L2BlockSourceEvents.L2BlockProven, {
 type: L2BlockSourceEvents.L2BlockProven,
-blockNumber:
+blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
 slotNumber: provenSlotNumber,
 epochNumber: provenEpochNumber
 });
+this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
 } else {
 this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
 }
 }
-this.instrumentation.updateLastProvenBlock(lastProvenBlockNumber);
 };
 // This is an edge case that we only hit if there are no proposed checkpoints.
 // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
 const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
 if (noCheckpoints) {
-await this.store.
+await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
 this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
 return rollupStatus;
 }
@@ -581,7 +642,7 @@ function mapArchiverConfig(config) {
 // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
 // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
 if (localPendingCheckpointNumber > 0) {
-const localPendingCheckpoint = await this.
+const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
 if (localPendingCheckpoint === undefined) {
 throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
 }
@@ -612,12 +673,12 @@ function mapArchiverConfig(config) {
 });
 let tipAfterUnwind = localPendingCheckpointNumber;
 while(true){
-const candidateCheckpoint = await this.
+const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
 if (candidateCheckpoint === undefined) {
 break;
 }
-const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.
-this.log.trace(`Checking local checkpoint ${candidateCheckpoint.
+const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
+this.log.trace(`Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`, {
 archiveAtContract,
 archiveLocal: candidateCheckpoint.archive.root.toString()
 });
@@ -641,7 +702,7 @@ function mapArchiverConfig(config) {
 [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
 this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
 // TODO(md): Retrieve from blob sink then from consensus client, then from peers
-const retrievedCheckpoints = await retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.debugClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.l1Addresses, this.instrumentation, this.log);
+const retrievedCheckpoints = await retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.debugClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.l1Addresses, this.instrumentation, this.log, !this.initialSyncComplete);
 if (retrievedCheckpoints.length === 0) {
 // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
 // See further details in earlier comments.
@@ -709,15 +770,14 @@ function mapArchiverConfig(config) {
 const [processDuration] = await elapsed(()=>this.addCheckpoints(validCheckpoints, updatedValidationResult));
 this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
 } catch (err) {
-if (err instanceof
-const {
-const
-const updatedL1SyncPoint =
+if (err instanceof InitialCheckpointNumberNotSequentialError) {
+const { previousCheckpointNumber, newCheckpointNumber } = err;
+const previousCheckpoint = previousCheckpointNumber ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber)) : undefined;
+const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
 await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
-this.log.warn(`Attempting to insert
-
-
-newBlockNumber,
+this.log.warn(`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`, {
+previousCheckpointNumber,
+newCheckpointNumber,
 updatedL1SyncPoint
 });
 }
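
The catch branch above relies on InitialCheckpointNumberNotSequentialError exposing previousCheckpointNumber and newCheckpointNumber. The error itself lives in the new src/archiver/errors.ts (+64 lines in this release), which is not shown in this excerpt. A minimal TypeScript sketch that is consistent with how archiver.js consumes it — the class name and the two fields come from the diff, the constructor shape and message are assumptions:

// Hypothetical sketch; the published errors.ts may define this differently.
export class InitialCheckpointNumberNotSequentialError extends Error {
  constructor(
    // Last checkpoint number already present in the store, if any.
    public readonly previousCheckpointNumber: number | undefined,
    // Checkpoint number of the first checkpoint we attempted to insert.
    public readonly newCheckpointNumber: number,
  ) {
    super(
      `Cannot insert checkpoint ${newCheckpointNumber} after checkpoint ${previousCheckpointNumber ?? 'none'}: not sequential`,
    );
    this.name = 'InitialCheckpointNumberNotSequentialError';
  }
}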
@@ -756,9 +816,16 @@ function mapArchiverConfig(config) {
 // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
 // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
 // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
-
-
-
+let latestLocalCheckpointArchive = undefined;
+let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
+if (lastRetrievedCheckpoint) {
+latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
+targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
+} else if (latestLocalCheckpointNumber > 0) {
+const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c);
+latestLocalCheckpointArchive = checkpoint.archive.root.toString();
+targetL1BlockNumber = checkpoint.l1.blockNumber;
+}
 this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
 latestLocalCheckpointNumber,
 latestLocalCheckpointArchive,
@@ -822,32 +889,44 @@ function mapArchiverConfig(config) {
 async getBlocksForEpoch(epochNumber) {
 const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
 const blocks = [];
-// Walk the list of
-// We'll typically ask for
-let
-const slot = (b)=>b.header.
-while(
-if (slot(
-blocks
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpoint && slot(checkpoint) >= start){
+if (slot(checkpoint) <= end) {
+// push the blocks on backwards
+const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
+for(let i = endBlock; i >= checkpoint.startBlock; i--){
+const block = await this.getBlock(BlockNumber(i));
+if (block) {
+blocks.push(block);
+}
+}
 }
-
+checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
 }
 return blocks.reverse();
 }
 async getBlockHeadersForEpoch(epochNumber) {
 const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
 const blocks = [];
-// Walk the list of
-// We'll typically ask for
-let
-
-
-
-
-
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpoint && slot(checkpoint) >= start){
+if (slot(checkpoint) <= end) {
+// push the blocks on backwards
+const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
+for(let i = endBlock; i >= checkpoint.startBlock; i--){
+const block = await this.getBlockHeader(BlockNumber(i));
+if (block) {
+blocks.push(block);
+}
+}
 }
-
-header = await this.getBlockHeader(number);
+checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
 }
 return blocks.reverse();
 }
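
The rewritten getBlocksForEpoch/getBlockHeadersForEpoch above no longer walk individual blocks: they walk checkpoints backwards from the latest synched checkpoint, keep every block of a checkpoint whose slot falls inside the epoch's slot range, and finally reverse the result into ascending order. The following standalone TypeScript sketch shows just that traversal; CheckpointData and getCheckpointData are simplified stand-ins for the package's store types.

// Collect block numbers for all checkpoints whose slot lies in [start, end].
interface CheckpointData {
  checkpointNumber: number;
  slotNumber: bigint;  // slot of the checkpoint header
  startBlock: number;  // first L2 block in the checkpoint
  numBlocks: number;   // number of L2 blocks in the checkpoint
}

async function collectBlockNumbersForSlotRange(
  getCheckpointData: (n: number) => Promise<CheckpointData | undefined>,
  latestCheckpointNumber: number,
  start: bigint,
  end: bigint,
): Promise<number[]> {
  const blockNumbers: number[] = [];
  let cp = await getCheckpointData(latestCheckpointNumber);
  // Walk backwards until we fall below the epoch's first slot or run out of checkpoints.
  while (cp && cp.slotNumber >= start) {
    if (cp.slotNumber <= end) {
      const endBlock = cp.startBlock + cp.numBlocks - 1;
      // Push block numbers in reverse so the final reverse() yields ascending order.
      for (let i = endBlock; i >= cp.startBlock; i--) {
        blockNumbers.push(i);
      }
    }
    cp = await getCheckpointData(cp.checkpointNumber - 1);
  }
  return blockNumbers.reverse();
}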
@@ -878,29 +957,6 @@ function mapArchiverConfig(config) {
 /** Returns whether the archiver has completed an initial sync run successfully. */ isInitialSyncComplete() {
 return this.initialSyncComplete;
 }
-async getPublishedCheckpoints(from, limit, proven) {
-// TODO: Implement this properly. This only works when we have one block per checkpoint.
-const blocks = await this.getPublishedBlocks(BlockNumber(from), limit, proven);
-return blocks.map((b)=>b.toPublishedCheckpoint());
-}
-async getCheckpointByArchive(archive) {
-// TODO: Implement this properly. This only works when we have one block per checkpoint.
-return (await this.getPublishedBlockByArchive(archive))?.block.toCheckpoint();
-}
-async getCheckpoints(from, limit, proven) {
-const published = await this.getPublishedCheckpoints(from, limit, proven);
-return published.map((p)=>p.checkpoint);
-}
-async getCheckpoint(number) {
-if (number < 0) {
-number = await this.getSynchedCheckpointNumber();
-}
-if (number === 0) {
-return undefined;
-}
-const published = await this.getPublishedCheckpoints(number, 1);
-return published[0]?.checkpoint;
-}
 async getCheckpointHeader(number) {
 if (number === 'latest') {
 number = await this.getSynchedCheckpointNumber();
@@ -908,66 +964,36 @@ function mapArchiverConfig(config) {
 if (number === 0) {
 return undefined;
 }
-const checkpoint = await this.
-
+const checkpoint = await this.store.getCheckpointData(number);
+if (!checkpoint) {
+return undefined;
+}
+return checkpoint.header;
 }
 getCheckpointNumber() {
 return this.getSynchedCheckpointNumber();
 }
-
-
-// Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
-return CheckpointNumber(await this.store.getSynchedL2BlockNumber());
+getSynchedCheckpointNumber() {
+return this.store.getSynchedCheckpointNumber();
 }
-
-
-// Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
-return CheckpointNumber(await this.store.getProvenL2BlockNumber());
+getProvenCheckpointNumber() {
+return this.store.getProvenCheckpointNumber();
 }
 setProvenCheckpointNumber(checkpointNumber) {
-
-// Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
-return this.store.setProvenL2BlockNumber(BlockNumber.fromCheckpointNumber(checkpointNumber));
+return this.store.setProvenCheckpointNumber(checkpointNumber);
 }
 unwindCheckpoints(from, checkpointsToUnwind) {
-
-// This only works when we have one block per checkpoint.
-return this.store.unwindBlocks(BlockNumber.fromCheckpointNumber(from), checkpointsToUnwind);
+return this.store.unwindCheckpoints(from, checkpointsToUnwind);
 }
-getLastBlockNumberInCheckpoint(checkpointNumber) {
-
-
-
+async getLastBlockNumberInCheckpoint(checkpointNumber) {
+const checkpointData = await this.store.getCheckpointData(checkpointNumber);
+if (!checkpointData) {
+return undefined;
+}
+return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
 }
 addCheckpoints(checkpoints, pendingChainValidationStatus) {
-
-// This only works when we have one block per checkpoint.
-return this.store.addBlocks(checkpoints.map((p)=>PublishedL2Block.fromPublishedCheckpoint(p)), pendingChainValidationStatus);
-}
-async getCheckpointsForEpoch(epochNumber) {
-// TODO: Create store and apis for checkpoints.
-// This only works when we have one block per checkpoint.
-const blocks = await this.getBlocksForEpoch(epochNumber);
-return blocks.map((b)=>b.toCheckpoint());
-}
-/**
-* Gets up to `limit` amount of L2 blocks starting from `from`.
-* @param from - Number of the first block to return (inclusive).
-* @param limit - The number of blocks to return.
-* @param proven - If true, only return blocks that have been proven.
-* @returns The requested L2 blocks.
-*/ getBlocks(from, limit, proven) {
-return this.getPublishedBlocks(from, limit, proven).then((blocks)=>blocks.map((b)=>b.block));
-}
-/** Equivalent to getBlocks but includes publish data. */ async getPublishedBlocks(from, limit, proven) {
-const limitWithProven = proven ? Math.min(limit, Math.max(await this.store.getProvenL2BlockNumber() - from + 1, 0)) : limit;
-return limitWithProven === 0 ? [] : await this.store.getPublishedBlocks(from, limitWithProven);
-}
-getPublishedBlockByHash(blockHash) {
-return this.store.getPublishedBlockByHash(blockHash);
-}
-getPublishedBlockByArchive(archive) {
-return this.store.getPublishedBlockByArchive(archive);
+return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
 }
 getBlockHeaderByHash(blockHash) {
 return this.store.getBlockHeaderByHash(blockHash);
@@ -979,7 +1005,7 @@ function mapArchiverConfig(config) {
 * Gets an l2 block.
 * @param number - The block number to return.
 * @returns The requested L2 block.
-*/ async
+*/ async getL2BlockNew(number) {
 // If the number provided is -ve, then return the latest block.
 if (number < 0) {
 number = await this.store.getSynchedL2BlockNumber();
@@ -987,8 +1013,8 @@ function mapArchiverConfig(config) {
 if (number === 0) {
 return undefined;
 }
-const publishedBlock = await this.store.
-return publishedBlock
+const publishedBlock = await this.store.store.getBlock(number);
+return publishedBlock;
 }
 async getBlockHeader(number) {
 if (number === 'latest') {
@@ -1000,6 +1026,18 @@ function mapArchiverConfig(config) {
 const headers = await this.store.getBlockHeaders(number, 1);
 return headers.length === 0 ? undefined : headers[0];
 }
+getCheckpointedBlock(number) {
+return this.store.getCheckpointedBlock(number);
+}
+getCheckpointedBlockByHash(blockHash) {
+return this.store.getCheckpointedBlockByHash(blockHash);
+}
+getProvenBlockNumber() {
+return this.store.getProvenBlockNumber();
+}
+getCheckpointedBlockByArchive(archive) {
+return this.store.getCheckpointedBlockByArchive(archive);
+}
 getTxEffect(txHash) {
 return this.store.getTxEffect(txHash);
 }
@@ -1030,15 +1068,10 @@ function mapArchiverConfig(config) {
 }
 /**
 * Gets the number of the latest L2 block processed by the block source implementation.
+* This includes both checkpointed and uncheckpointed blocks.
 * @returns The number of the latest L2 block processed by the block source implementation.
 */ getBlockNumber() {
-return this.store.
-}
-getProvenBlockNumber() {
-return this.store.getProvenL2BlockNumber();
-}
-/** Forcefully updates the last proven block number. Use for testing. */ setProvenBlockNumber(blockNumber) {
-return this.store.setProvenL2BlockNumber(blockNumber);
+return this.store.getLatestBlockNumber();
 }
 getContractClass(id) {
 return this.store.getContractClass(id);
@@ -1130,23 +1163,23 @@ function mapArchiverConfig(config) {
 };
 }
 async rollbackTo(targetL2BlockNumber) {
+// TODO(pw/mbps): This still assumes 1 block per checkpoint
 const currentBlocks = await this.getL2Tips();
 const currentL2Block = currentBlocks.latest.number;
 const currentProvenBlock = currentBlocks.proven.number;
-// const currentFinalizedBlock = currentBlocks.finalized.number;
 if (targetL2BlockNumber >= currentL2Block) {
 throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
 }
 const blocksToUnwind = currentL2Block - targetL2BlockNumber;
-const targetL2Block = await this.store.
+const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
 if (!targetL2Block) {
 throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
 }
 const targetL1BlockNumber = targetL2Block.l1.blockNumber;
 const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
 const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
-this.log.info(`Unwinding ${blocksToUnwind}
-await this.store.
+this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
+await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
 this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
 await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
 this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
@@ -1157,7 +1190,7 @@ function mapArchiverConfig(config) {
 });
 if (targetL2BlockNumber < currentProvenBlock) {
 this.log.info(`Clearing proven L2 block number`);
-await this.store.
+await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
 }
 // TODO(palla/reorg): Set the finalized block when we add support for it.
 // if (targetL2BlockNumber < currentFinalizedBlock) {
@@ -1165,6 +1198,103 @@ function mapArchiverConfig(config) {
 // await this.store.setFinalizedL2BlockNumber(0);
 // }
 }
+async getPublishedCheckpoints(checkpointNumber, limit) {
+const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
+const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
+const fullCheckpoints = [];
+for(let i = 0; i < checkpoints.length; i++){
+const blocksForCheckpoint = blocks[i];
+const checkpoint = checkpoints[i];
+const fullCheckpoint = new Checkpoint(checkpoint.archive, checkpoint.header, blocksForCheckpoint, checkpoint.checkpointNumber);
+const publishedCheckpoint = new PublishedCheckpoint(fullCheckpoint, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+fullCheckpoints.push(publishedCheckpoint);
+}
+return fullCheckpoints;
+}
+async getCheckpointsForEpoch(epochNumber) {
+const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
+const checkpoints = [];
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpointData && slot(checkpointData) >= start){
+if (slot(checkpointData) <= end) {
+// push the checkpoints on backwards
+const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
+checkpoints.push(checkpoint.checkpoint);
+}
+checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
+}
+return checkpoints.reverse();
+}
+/* Legacy APIs */ async getPublishedBlockByHash(blockHash) {
+const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
+return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
+}
+async getPublishedBlockByArchive(archive) {
+const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
+return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
+}
+/**
+* Gets up to `limit` amount of L2 blocks starting from `from`.
+* @param from - Number of the first block to return (inclusive).
+* @param limit - The number of blocks to return.
+* @param proven - If true, only return blocks that have been proven.
+* @returns The requested L2 blocks.
+*/ async getBlocks(from, limit, proven) {
+const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
+return publishedBlocks.map((x)=>x.block);
+}
+async getPublishedBlocks(from, limit, proven) {
+const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
+const provenCheckpointNumber = await this.getProvenCheckpointNumber();
+const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
+const olbBlocks = [];
+for(let i = 0; i < checkpoints.length; i++){
+const blockForCheckpoint = blocks[i][0];
+const checkpoint = checkpoints[i];
+if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
+continue;
+}
+const oldCheckpoint = new Checkpoint(blockForCheckpoint.archive, checkpoint.header, [
+blockForCheckpoint
+], checkpoint.checkpointNumber);
+const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
+const publishedBlock = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+olbBlocks.push(publishedBlock);
+}
+return olbBlocks;
+}
+async buildOldBlockFromCheckpointedBlock(checkpointedBlock) {
+if (!checkpointedBlock) {
+return undefined;
+}
+const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
+if (!checkpoint) {
+return checkpoint;
+}
+const fullCheckpoint = new Checkpoint(checkpointedBlock?.block.archive, checkpoint?.header, [
+checkpointedBlock.block
+], checkpoint.checkpointNumber);
+const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
+const published = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+return published;
+}
+async getBlock(number) {
+// If the number provided is -ve, then return the latest block.
+if (number < 0) {
+number = await this.store.getSynchedL2BlockNumber();
+}
+if (number === 0) {
+return undefined;
+}
+const publishedBlocks = await this.getPublishedBlocks(number, 1);
+if (publishedBlocks.length === 0) {
+return undefined;
+}
+return publishedBlocks[0].block;
+}
 }
 _ts_decorate([
 trackSpan('Archiver.sync')
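
The hunk above adds a "legacy bridge": checkpoint-level storage becomes the source of truth, and the old block-oriented APIs (getBlocks, getPublishedBlocks, getPublishedBlockByHash/Archive) are answered by projecting a stored checkpoint and its blocks back into per-block results. The shipped code still takes only the first block of each checkpoint; the TypeScript sketch below generalizes to several blocks per checkpoint purely for illustration, and all types are simplified stand-ins rather than the real Checkpoint/PublishedL2Block classes.

// Illustrative projection of checkpoint-level records into the flat,
// block-oriented shape that the legacy APIs returned.
interface StoredCheckpoint<TBlock> {
  checkpointNumber: number;
  blocks: TBlock[];             // blocks bundled into this checkpoint
  l1: { blockNumber: bigint };  // L1 publication info for the checkpoint
}

interface LegacyPublishedBlock<TBlock> {
  block: TBlock;
  l1: { blockNumber: bigint };
}

function toLegacyPublishedBlocks<TBlock>(
  checkpoints: StoredCheckpoint<TBlock>[],
): LegacyPublishedBlock<TBlock>[] {
  // Each block inherits the L1 publication data of its enclosing checkpoint.
  return checkpoints.flatMap(cp => cp.blocks.map(block => ({ block, l1: cp.l1 })));
}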
@@ -1286,6 +1416,18 @@ var Operation = /*#__PURE__*/ function(Operation) {
 }
 return true;
 }
+async addBlockDataToDB(block) {
+const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
+// ContractInstancePublished event logs are broadcast in privateLogs.
+const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
+const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
+return (await Promise.all([
+this.#updatePublishedContractClasses(contractClassLogs, block.number, 0),
+this.#updateDeployedContractInstances(privateLogs, block.number, 0),
+this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 0),
+this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number)
+])).every(Boolean);
+}
 addBlocks(blocks, pendingChainValidationStatus) {
 // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
 // or if the previous block is not in the store.
@@ -1295,34 +1437,51 @@ var Operation = /*#__PURE__*/ function(Operation) {
 // Update the pending chain validation status if provided
 pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
 // Add any logs emitted during the retrieved blocks
-this.store.addLogs(blocks
+this.store.addLogs(blocks),
+// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
+...blocks.map((block)=>{
+return this.addBlockDataToDB(block);
+})
+]);
+return opResults.every(Boolean);
+});
+}
+addCheckpoints(checkpoints, pendingChainValidationStatus) {
+// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
+// or if the previous block is not in the store.
+return this.store.transactionAsync(async ()=>{
+await this.store.addCheckpoints(checkpoints);
+const allBlocks = checkpoints.flatMap((ch)=>ch.checkpoint.blocks);
+const opResults = await Promise.all([
+// Update the pending chain validation status if provided
+pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
+// Add any logs emitted during the retrieved blocks
+this.store.addLogs(allBlocks),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
-...
-
-// ContractInstancePublished event logs are broadcast in privateLogs.
-const privateLogs = block.block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
-const publicLogs = block.block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
-return (await Promise.all([
-this.#updatePublishedContractClasses(contractClassLogs, block.block.number, 0),
-this.#updateDeployedContractInstances(privateLogs, block.block.number, 0),
-this.#updateUpdatedContractInstances(publicLogs, block.block.header.globalVariables.timestamp, 0),
-this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.block.number)
-])).every(Boolean);
+...allBlocks.map((block)=>{
+return this.addBlockDataToDB(block);
 })
 ]);
 return opResults.every(Boolean);
 });
 }
-async
-
+async unwindCheckpoints(from, checkpointsToUnwind) {
+if (checkpointsToUnwind <= 0) {
+throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
+}
+const last = await this.getSynchedCheckpointNumber();
 if (from != last) {
-throw new Error(`Cannot unwind
+throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
 }
-
-
+const blocks = [];
+const lastCheckpointNumber = from + checkpointsToUnwind - 1;
+for(let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++){
+const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
+if (!blocksForCheckpoint) {
+continue;
+}
+blocks.push(...blocksForCheckpoint);
 }
-// from - blocksToUnwind = the new head, so + 1 for what we need to remove
-const blocks = await this.getPublishedBlocks(BlockNumber(from - blocksToUnwind + 1), blocksToUnwind);
 const opResults = await Promise.all([
 // Prune rolls back to the last proven block, which is by definition valid
 this.store.setPendingChainValidationStatus({
@@ -1330,32 +1489,44 @@ var Operation = /*#__PURE__*/ function(Operation) {
 }),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
 ...blocks.map(async (block)=>{
-const contractClassLogs = block.
+const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
 // ContractInstancePublished event logs are broadcast in privateLogs.
-const privateLogs = block.
-const publicLogs = block.
+const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
+const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
 return (await Promise.all([
-this.#updatePublishedContractClasses(contractClassLogs, block.
-this.#updateDeployedContractInstances(privateLogs, block.
-this.#updateUpdatedContractInstances(publicLogs, block.
+this.#updatePublishedContractClasses(contractClassLogs, block.number, 1),
+this.#updateDeployedContractInstances(privateLogs, block.number, 1),
+this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 1)
 ])).every(Boolean);
 }),
-this.store.deleteLogs(blocks
-this.store.
+this.store.deleteLogs(blocks),
+this.store.unwindCheckpoints(from, checkpointsToUnwind)
 ]);
 return opResults.every(Boolean);
 }
-
-return this.store.
+getCheckpointData(checkpointNumber) {
+return this.store.getCheckpointData(checkpointNumber);
+}
+getRangeOfCheckpoints(from, limit) {
+return this.store.getRangeOfCheckpoints(from, limit);
+}
+getCheckpointedL2BlockNumber() {
+return this.store.getCheckpointedL2BlockNumber();
+}
+getSynchedCheckpointNumber() {
+return this.store.getSynchedCheckpointNumber();
+}
+setCheckpointSynchedL1BlockNumber(l1BlockNumber) {
+return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
 }
-
-return this.store.
+getCheckpointedBlock(number) {
+return this.store.getCheckpointedBlock(number);
 }
-
-return this.store.
+getCheckpointedBlockByHash(blockHash) {
+return this.store.getCheckpointedBlockByHash(blockHash);
 }
-
-return this.store.
+getCheckpointedBlockByArchive(archive) {
+return this.store.getCheckpointedBlockByArchive(archive);
 }
 getBlockHeaders(from, limit) {
 return this.store.getBlockHeaders(from, limit);
@@ -1366,6 +1537,18 @@ var Operation = /*#__PURE__*/ function(Operation) {
 getBlockHeaderByArchive(archive) {
 return this.store.getBlockHeaderByArchive(archive);
 }
+getBlockByHash(blockHash) {
+return this.store.getBlockByHash(blockHash);
+}
+getBlockByArchive(archive) {
+return this.store.getBlockByArchive(archive);
+}
+getLatestBlockNumber() {
+return this.store.getLatestBlockNumber();
+}
+getBlocksForCheckpoint(checkpointNumber) {
+return this.store.getBlocksForCheckpoint(checkpointNumber);
+}
 getTxEffect(txHash) {
 return this.store.getTxEffect(txHash);
 }
@@ -1391,16 +1574,19 @@ var Operation = /*#__PURE__*/ function(Operation) {
 return this.store.getContractClassLogs(filter);
 }
 getSynchedL2BlockNumber() {
-return this.store.
+return this.store.getCheckpointedL2BlockNumber();
 }
-
-return this.store.
+getProvenCheckpointNumber() {
+return this.store.getProvenCheckpointNumber();
 }
-
-return this.store.
+getProvenBlockNumber() {
+return this.store.getProvenBlockNumber();
+}
+setProvenCheckpointNumber(checkpointNumber) {
+return this.store.setProvenCheckpointNumber(checkpointNumber);
 }
 setBlockSynchedL1BlockNumber(l1BlockNumber) {
-return this.store.
+return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
 }
 setMessageSynchedL1Block(l1Block) {
 return this.store.setMessageSynchedL1Block(l1Block);