@aztec/archiver 3.0.0-nightly.20251126 → 3.0.0-nightly.20251128
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver/archiver.d.ts +24 -17
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +218 -160
- package/dest/archiver/archiver_store.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +5 -4
- package/dest/archiver/config.d.ts +1 -1
- package/dest/archiver/data_retrieval.d.ts +15 -13
- package/dest/archiver/data_retrieval.d.ts.map +1 -1
- package/dest/archiver/data_retrieval.js +56 -55
- package/dest/archiver/errors.d.ts +1 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/index.d.ts +1 -1
- package/dest/archiver/instrumentation.d.ts +3 -3
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts +1 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.d.ts +1 -1
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -1
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/structs/data_retrieval.d.ts +1 -1
- package/dest/archiver/structs/inbox_message.d.ts +1 -1
- package/dest/archiver/structs/published.d.ts +3 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/archiver/validation.d.ts +10 -4
- package/dest/archiver/validation.d.ts.map +1 -1
- package/dest/archiver/validation.js +25 -17
- package/dest/factory.d.ts +1 -1
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/rpc/index.d.ts +2 -2
- package/dest/test/index.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.d.ts +6 -5
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_structs.d.ts +1 -1
- package/package.json +17 -17
- package/src/archiver/archiver.ts +290 -192
- package/src/archiver/archiver_store_test_suite.ts +5 -4
- package/src/archiver/data_retrieval.ts +71 -68
- package/src/archiver/instrumentation.ts +2 -2
- package/src/archiver/structs/published.ts +2 -1
- package/src/archiver/validation.ts +40 -19
- package/src/index.ts +1 -1
- package/src/test/mock_l2_block_source.ts +5 -4
package/src/archiver/archiver.ts
CHANGED
@@ -9,6 +9,7 @@ import {
   createEthereumChain,
 } from '@aztec/ethereum';
 import { maxBigint } from '@aztec/foundation/bigint';
+import { EpochNumber } from '@aztec/foundation/branded-types';
 import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
 import { merge, pick } from '@aztec/foundation/collection';
 import type { EthAddress } from '@aztec/foundation/eth-address';
@@ -33,12 +34,14 @@ import type { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import {
   type ArchiverEmitter,
-
+  L2Block,
   type L2BlockId,
   type L2BlockSource,
   L2BlockSourceEvents,
   type L2Tips,
+  PublishedL2Block,
 } from '@aztec/stdlib/block';
+import type { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import {
   type ContractClassPublic,
   type ContractDataSource,
@@ -61,6 +64,7 @@ import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec
 import type { L2LogsSource } from '@aztec/stdlib/interfaces/server';
 import { ContractClassLog, type LogFilter, type PrivateLog, type PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
 import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging';
+import type { CheckpointHeader } from '@aztec/stdlib/rollup';
 import { type BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx';
 import type { UInt64 } from '@aztec/stdlib/types';
 import {
@@ -78,16 +82,15 @@ import { type GetContractReturnType, type Hex, createPublicClient, fallback, htt
 import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
 import type { ArchiverConfig } from './config.js';
 import {
-
+  retrieveCheckpointsFromRollup,
   retrieveL1ToL2Message,
   retrieveL1ToL2Messages,
-
+  retrievedToPublishedCheckpoint,
 } from './data_retrieval.js';
 import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
 import { ArchiverInstrumentation } from './instrumentation.js';
 import type { InboxMessage } from './structs/inbox_message.js';
-import type
-import { type ValidateBlockResult, validateBlockAttestations } from './validation.js';
+import { type ValidateBlockResult, validateCheckpointAttestations } from './validation.js';

 /**
  * Helper interface to combine all sources this archiver implementation provides.
@@ -111,22 +114,22 @@ function mapArchiverConfig(config: Partial<ArchiverConfig>) {
 }

 type RollupStatus = {
-
+  provenCheckpointNumber: number;
   provenArchive: Hex;
-
+  pendingCheckpointNumber: number;
   pendingArchive: Hex;
   validationResult: ValidateBlockResult | undefined;
-
-
+  lastRetrievedCheckpoint?: PublishedCheckpoint;
+  lastL1BlockWithCheckpoint?: bigint;
 };

 /**
- * Pulls
+ * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
  * Responsible for handling robust L1 polling so that other components do not need to
  * concern themselves with it.
  */
 export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implements ArchiveSource, Traceable {
-  /** A loop in which we will be continually fetching new
+  /** A loop in which we will be continually fetching new checkpoints. */
   private runningPromise: RunningPromise;

   private rollup: RollupContract;
@@ -382,35 +385,35 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
       );
     }

-    // ********** Events that are processed per
+    // ********** Events that are processed per checkpoint **********
     if (currentL1BlockNumber > blocksSynchedTo) {
-      // First we retrieve new L2 blocks and store them in the DB. This will also update the
-      // pending chain validation status, proven
-      const rollupStatus = await this.
+      // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
+      // pending chain validation status, proven checkpoint number, and synched L1 block number.
+      const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
       // Then we prune the current epoch if it'd reorg on next submission.
-      // Note that we don't do this before retrieving
-      //
+      // Note that we don't do this before retrieving checkpoints because we may need to retrieve
+      // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
      // the chain locally before we start unwinding stuff. This can be optimized by figuring out
-      // up to which point we're pruning, and then requesting
+      // up to which point we're pruning, and then requesting checkpoints up to that point only.
       const { rollupCanPrune } = await this.handleEpochPrune(
-        rollupStatus.
+        rollupStatus.provenCheckpointNumber,
         currentL1BlockNumber,
         currentL1Timestamp,
       );

-      // If the last
+      // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
       // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
-      // we get a valid
-      if (!rollupStatus.validationResult?.valid && rollupStatus.
-        await this.store.setBlockSynchedL1BlockNumber(rollupStatus.
+      // we get a valid checkpoint to advance the syncpoint.
+      if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
+        await this.store.setBlockSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
       }

-      // And lastly we check if we are missing any
+      // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
       // We only do this if rollup cant prune on the next submission. Otherwise we will end up
-      // re-syncing the
+      // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
       // since the archiver will rightfully refuse to sync up to it.
       if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
-        await this.
+        await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
       }

       this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
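For orientation, the per-iteration flow introduced in this hunk reduces to the sketch below. It is a condensed paraphrase rather than code from the package: the SyncSteps interface and value types are simplified stand-ins for the archiver's actual members, and only the call order visible in the diff is assumed.

type ValidateBlockResult = { valid: boolean };

interface RollupStatus {
  provenCheckpointNumber: number;
  validationResult?: ValidateBlockResult;
  lastL1BlockWithCheckpoint?: bigint;
}

interface SyncSteps {
  handleCheckpoints(from: bigint, to: bigint): Promise<RollupStatus>;
  handleEpochPrune(proven: number, l1Block: bigint, l1Timestamp: bigint): Promise<{ rollupCanPrune: boolean }>;
  checkForNewCheckpointsBeforeL1SyncPoint(status: RollupStatus, from: bigint, to: bigint): Promise<void>;
  setBlockSynchedL1BlockNumber(l1Block: bigint): Promise<void>;
}

async function syncIteration(steps: SyncSteps, blocksSynchedTo: bigint, l1Block: bigint, l1Timestamp: bigint) {
  // 1. Download new checkpoints, persist them, and learn the rollup's proven/pending tips.
  const status = await steps.handleCheckpoints(blocksSynchedTo, l1Block);
  // 2. Unwind local checkpoints that would be reorged out by the next submission.
  const { rollupCanPrune } = await steps.handleEpochPrune(status.provenCheckpointNumber, l1Block, l1Timestamp);
  // 3. Skip past an invalid checkpoint so it is not re-downloaded on every iteration.
  if (!status.validationResult?.valid && status.lastL1BlockWithCheckpoint !== undefined) {
    await steps.setBlockSynchedL1BlockNumber(status.lastL1BlockWithCheckpoint);
  }
  // 4. Look behind the L1 sync point for checkpoints an L1 reorg may have inserted.
  if (!rollupCanPrune && status.validationResult?.valid) {
    await steps.checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, l1Block);
  }
}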
@@ -450,43 +453,47 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     return result;
   }

-  /** Checks if there'd be a reorg for the next
-  private async handleEpochPrune(
+  /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */
+  private async handleEpochPrune(
+    provenCheckpointNumber: number,
+    currentL1BlockNumber: bigint,
+    currentL1Timestamp: bigint,
+  ) {
     const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
-    const
-    const canPrune =
+    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+    const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;

     if (canPrune) {
       const timer = new Timer();
-      const pruneFrom =
+      const pruneFrom = provenCheckpointNumber + 1;

-      const header = await this.
+      const header = await this.getCheckpointHeader(Number(pruneFrom));
       if (header === undefined) {
-        throw new Error(`Missing
+        throw new Error(`Missing checkpoint header ${pruneFrom}`);
       }

-      const pruneFromSlotNumber = header.
-      const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
+      const pruneFromSlotNumber = header.slotNumber.toBigInt();
+      const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);

-      const
+      const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;

-      const
+      const checkpoints = await this.getCheckpoints(Number(provenCheckpointNumber) + 1, Number(checkpointsToUnwind));

       // Emit an event for listening services to react to the chain prune
       this.emit(L2BlockSourceEvents.L2PruneDetected, {
         type: L2BlockSourceEvents.L2PruneDetected,
         epochNumber: pruneFromEpochNumber,
-        blocks,
+        blocks: checkpoints.flatMap(c => L2Block.fromCheckpoint(c)),
       });

       this.log.debug(
-        `L2 prune from ${
+        `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
       );
-      await this.
+      await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
       this.log.warn(
-        `Unwound ${count(
-        `to ${
-        `Updated
+        `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
+          `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
+          `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
       );
       this.instrumentation.processPrune(timer.ms());
       // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
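The unwind arithmetic in handleEpochPrune is easiest to see with concrete numbers. A worked example with illustrative values (not taken from the package):

// If the rollup has proven up to checkpoint 40 but we have synched a pending tip of 47,
// a prune on the next submission would drop everything after the proven tip.
const provenCheckpointNumber = 40;
const localPendingCheckpointNumber = 47;

const pruneFrom = provenCheckpointNumber + 1; // 41: first checkpoint that would be reorged out
const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber; // 7 checkpoints: 41..47

console.log({ pruneFrom, checkpointsToUnwind });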
@@ -665,171 +672,185 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     return Buffer32.fromString(block.hash);
   }

-  private async
-  const
+  private async handleCheckpoints(blocksSynchedTo: bigint, currentL1BlockNumber: bigint): Promise<RollupStatus> {
+    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
     const initialValidationResult: ValidateBlockResult | undefined = await this.store.getPendingChainValidationStatus();
-    const [
-
+    const [
+      rollupProvenCheckpointNumber,
+      provenArchive,
+      rollupPendingCheckpointNumber,
+      pendingArchive,
+      archiveForLocalPendingCheckpointNumber,
+    ] = await this.rollup.status(BigInt(localPendingCheckpointNumber), { blockNumber: currentL1BlockNumber });
+    const provenCheckpointNumber = Number(rollupProvenCheckpointNumber);
+    const pendingCheckpointNumber = Number(rollupPendingCheckpointNumber);
     const rollupStatus = {
-
+      provenCheckpointNumber,
       provenArchive,
-
+      pendingCheckpointNumber,
       pendingArchive,
       validationResult: initialValidationResult,
     };
     this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
-
+      localPendingCheckpointNumber,
       blocksSynchedTo,
       currentL1BlockNumber,
-
+      archiveForLocalPendingCheckpointNumber,
       ...rollupStatus,
     });

-    const
-    // Annoying edge case: if proven
-    // we need to set it to zero. This is an edge case because we dont have a
-    // so
-    if (
-    const
-    if (
-    await this.
-    this.log.info(`Rolled back proven chain to
+    const updateProvenCheckpoint = async () => {
+      // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
+      // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
+      // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
+      if (provenCheckpointNumber === 0) {
+        const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+        if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+          await this.setProvenCheckpointNumber(provenCheckpointNumber);
+          this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
         }
       }

-      const
+      const localCheckpointForDestinationProvenCheckpointNumber = await this.getCheckpoint(provenCheckpointNumber);

-      // Sanity check. I've hit what seems to be a state where the proven
-      // synched
-      const synched = await this.
-      if (
+      // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
+      // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
+      const synched = await this.getSynchedCheckpointNumber();
+      if (
+        localCheckpointForDestinationProvenCheckpointNumber &&
+        synched < localCheckpointForDestinationProvenCheckpointNumber.number
+      ) {
         this.log.error(
-          `Hit local
+          `Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.number} > ${synched}`,
         );
       }

       this.log.trace(
-        `Local
-
+        `Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${
+          localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'
         }`,
       );

+      const lastProvenBlockNumber = await this.getLastBlockNumberInCheckpoint(provenCheckpointNumber);
       if (
-
-        provenArchive ===
+        localCheckpointForDestinationProvenCheckpointNumber &&
+        provenArchive === localCheckpointForDestinationProvenCheckpointNumber.archive.root.toString()
       ) {
-        const
-        if (
-          await this.
-          this.log.info(`Updated proven chain to
-
+        const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+        if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+          await this.setProvenCheckpointNumber(provenCheckpointNumber);
+          this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
+            provenCheckpointNumber,
          });
-          const provenSlotNumber =
-
-
+          const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber.toBigInt();
+          const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
+
           this.emit(L2BlockSourceEvents.L2BlockProven, {
             type: L2BlockSourceEvents.L2BlockProven,
-            blockNumber:
+            blockNumber: BigInt(lastProvenBlockNumber),
             slotNumber: provenSlotNumber,
             epochNumber: provenEpochNumber,
           });
         } else {
-          this.log.trace(`Proven
+          this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
         }
       }
-      this.instrumentation.updateLastProvenBlock(
+      this.instrumentation.updateLastProvenBlock(lastProvenBlockNumber);
     };

-    // This is an edge case that we only hit if there are no proposed
-    // If we have 0
-    const
-    if (
+    // This is an edge case that we only hit if there are no proposed checkpoints.
+    // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
+    const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
+    if (noCheckpoints) {
       await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
       this.log.debug(
-        `No
+        `No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`,
       );
       return rollupStatus;
     }

-    await
+    await updateProvenCheckpoint();

     // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
-    // are any state that could be impacted by it. If we have no
-    if (
-      const
-      if (
-        throw new Error(`Missing
+    // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
+    if (localPendingCheckpointNumber > 0) {
+      const localPendingCheckpoint = await this.getCheckpoint(localPendingCheckpointNumber);
+      if (localPendingCheckpoint === undefined) {
+        throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
      }

-      const localPendingArchiveRoot =
-      const
-      if (
+      const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
+      const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive === localPendingArchiveRoot;
+      if (noCheckpointSinceLast) {
        // We believe the following line causes a problem when we encounter L1 re-orgs.
        // Basically, by setting the synched L1 block number here, we are saying that we have
-        // processed all
+        // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
        // this block again (or any blocks before).
-        // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing
+        // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
        // We must only set this block number based on actually retrieved logs.
        // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
        // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
-        this.log.debug(`No
+        this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
        return rollupStatus;
      }

-      const
-      if (!
-        // If our local pending
+      const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber === localPendingArchiveRoot;
+      if (!localPendingCheckpointInChain) {
+        // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
        // or the L1 have reorged.
        // In any case, we have to figure out how far into the past the action will take us.
-        // For simplicity here, we will simply rewind until we end in a
+        // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
        this.log.debug(
-          `L2 prune has been detected due to local pending
-          {
+          `L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`,
+          { localPendingCheckpointNumber, localPendingArchiveRoot, archiveForLocalPendingCheckpointNumber },
        );

-        let tipAfterUnwind =
+        let tipAfterUnwind = localPendingCheckpointNumber;
        while (true) {
-          const
-          if (
+          const candidateCheckpoint = await this.getCheckpoint(tipAfterUnwind);
+          if (candidateCheckpoint === undefined) {
            break;
          }

-          const archiveAtContract = await this.rollup.archiveAt(BigInt(
-          this.log.trace(
-
-
-
-
+          const archiveAtContract = await this.rollup.archiveAt(BigInt(candidateCheckpoint.number));
+          this.log.trace(
+            `Checking local checkpoint ${candidateCheckpoint.number} with archive ${candidateCheckpoint.archive.root}`,
+            {
+              archiveAtContract,
+              archiveLocal: candidateCheckpoint.archive.root.toString(),
+            },
+          );
+          if (archiveAtContract === candidateCheckpoint.archive.root.toString()) {
            break;
          }
          tipAfterUnwind--;
        }

-        const
-        await this.
+        const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
+        await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);

        this.log.warn(
-          `Unwound ${count(
-          `due to mismatched
-          `Updated L2 latest
+          `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
+            `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` +
+            `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
        );
      }
    }

-    // Retrieve
+    // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
    // computed using the L2 block time vs the L1 block time.
    let searchStartBlock: bigint = blocksSynchedTo;
    let searchEndBlock: bigint = blocksSynchedTo;
-    let
-    let
+    let lastRetrievedCheckpoint: PublishedCheckpoint | undefined;
+    let lastL1BlockWithCheckpoint: bigint | undefined = undefined;

    do {
      [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);

-      this.log.trace(`Retrieving
+      this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);

      // TODO(md): Retrieve from blob sink then from consensus client, then from peers
-      const
+      const retrievedCheckpoints = await retrieveCheckpointsFromRollup(
        this.rollup.getContract() as GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
        this.publicClient,
        this.blobSinkClient,
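The reorg handling above rewinds the local tip one checkpoint at a time until the local archive root matches what the rollup contract reports for that checkpoint number. A minimal sketch of that search, with the contract lookup and local checkpoint store abstracted behind assumed interfaces:

interface LocalCheckpoint {
  number: number;
  archiveRoot: string;
}

// Walks back from the local pending tip until a checkpoint's archive root matches the one on L1,
// returning the highest checkpoint number that is still canonical (0 if none match).
async function findTipAfterUnwind(
  localPendingCheckpointNumber: number,
  getLocalCheckpoint: (n: number) => Promise<LocalCheckpoint | undefined>,
  archiveAtContract: (n: bigint) => Promise<string>,
): Promise<number> {
  let tip = localPendingCheckpointNumber;
  while (tip > 0) {
    const candidate = await getLocalCheckpoint(tip);
    if (candidate === undefined) {
      break; // nothing stored locally at this height; cannot rewind further
    }
    if ((await archiveAtContract(BigInt(candidate.number))) === candidate.archiveRoot) {
      break; // local and on-chain archives agree; this is the new tip
    }
    tip--;
  }
  return tip;
}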
@@ -838,31 +859,35 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
        this.log,
      );

-      if (
+      if (retrievedCheckpoints.length === 0) {
        // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
        // See further details in earlier comments.
-        this.log.trace(`Retrieved no new
+        this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
        continue;
      }

      this.log.debug(
-        `Retrieved ${
-        {
+        `Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`,
+        {
+          lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
+          searchStartBlock,
+          searchEndBlock,
+        },
      );

-      const
-      const
+      const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map(b => retrievedToPublishedCheckpoint(b)));
+      const validCheckpoints: PublishedCheckpoint[] = [];

-      for (const
+      for (const published of publishedCheckpoints) {
        const validationResult = this.config.skipValidateBlockAttestations
          ? { valid: true as const }
-          : await
+          : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);

-        // Only update the validation result if it has changed, so we can keep track of the first invalid
-        // in case there is a sequence of more than one invalid
-        // There is an exception though: if
+        // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
+        // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
+        // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
        // we need to update the validation result, since we need to be able to invalidate the new one.
-        // See test 'chain progresses if an invalid
+        // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
        if (
          rollupStatus.validationResult?.valid !== validationResult.valid ||
          (!rollupStatus.validationResult.valid &&
@@ -873,9 +898,9 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
        }

        if (!validationResult.valid) {
-          this.log.warn(`Skipping
-
-          l1BlockNumber:
+          this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
+            checkpointHash: published.checkpoint.hash(),
+            l1BlockNumber: published.l1.blockNumber,
            ...pick(validationResult, 'reason'),
          });

@@ -885,28 +910,31 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
            validationResult,
          });

-          // We keep consuming
-          // We just pretend the invalid ones are not there and keep consuming the next
-          // Note that this breaks if the committee ever attests to a descendant of an invalid
+          // We keep consuming checkpoints if we find an invalid one, since we do not listen for CheckpointInvalidated events
+          // We just pretend the invalid ones are not there and keep consuming the next checkpoints
+          // Note that this breaks if the committee ever attests to a descendant of an invalid checkpoint
          continue;
        }

-
-        this.log.debug(
-
-
-
-
-
+        validCheckpoints.push(published);
+        this.log.debug(
+          `Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`,
+          {
+            checkpointHash: published.checkpoint.hash(),
+            l1BlockNumber: published.l1.blockNumber,
+            ...published.checkpoint.header.toInspect(),
+            blocks: published.checkpoint.blocks.map(b => b.getStats()),
+          },
+        );
      }

      try {
        const updatedValidationResult =
          rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
-        const [processDuration] = await elapsed(() => this.
+        const [processDuration] = await elapsed(() => this.addCheckpoints(validCheckpoints, updatedValidationResult));
        this.instrumentation.processNewBlocks(
-          processDuration /
-
+          processDuration / validCheckpoints.length,
+          validCheckpoints.flatMap(c => c.checkpoint.blocks),
        );
      } catch (err) {
        if (err instanceof InitialBlockNumberNotSequentialError) {
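The validation pass above keeps consuming checkpoints after an invalid one rather than halting. A stripped-down sketch of that filter, with the checkpoint and validator types reduced to assumed stand-ins for the package's real ones:

interface PublishedCheckpointLike {
  number: number;
}
type ValidationResult = { valid: true } | { valid: false; reason: string };

async function filterValidCheckpoints(
  published: PublishedCheckpointLike[],
  validate: (c: PublishedCheckpointLike) => Promise<ValidationResult>,
  skipValidation: boolean,
): Promise<PublishedCheckpointLike[]> {
  const valid: PublishedCheckpointLike[] = [];
  for (const checkpoint of published) {
    const result = skipValidation ? ({ valid: true } as const) : await validate(checkpoint);
    if (!result.valid) {
      // Pretend the invalid checkpoint is not there and keep consuming the rest,
      // mirroring the `continue` in the loop above.
      continue;
    }
    valid.push(checkpoint);
  }
  return valid;
}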
@@ -929,57 +957,56 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
          throw err;
        }

-      for (const
-        this.log.info(`Downloaded
-
-
-
-
-
-
+      for (const checkpoint of validCheckpoints) {
+        this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
+          checkpointHash: checkpoint.checkpoint.hash(),
+          checkpointNumber: checkpoint.checkpoint.number,
+          blockCount: checkpoint.checkpoint.blocks.length,
+          txCount: checkpoint.checkpoint.blocks.reduce((acc, b) => acc + b.body.txEffects.length, 0),
+          header: checkpoint.checkpoint.header.toInspect(),
+          archiveRoot: checkpoint.checkpoint.archive.root.toString(),
+          archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex,
        });
      }
-
-
+      lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
+      lastL1BlockWithCheckpoint = publishedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
    } while (searchEndBlock < currentL1BlockNumber);

    // Important that we update AFTER inserting the blocks.
-    await
+    await updateProvenCheckpoint();

-    return { ...rollupStatus,
+    return { ...rollupStatus, lastRetrievedCheckpoint, lastL1BlockWithCheckpoint };
  }

-  private async
-    status:
-    lastRetrievedBlock?: PublishedL2Block;
-    pendingBlockNumber: number;
-    },
+  private async checkForNewCheckpointsBeforeL1SyncPoint(
+    status: RollupStatus,
    blocksSynchedTo: bigint,
    currentL1BlockNumber: bigint,
  ) {
-    const {
-    // Compare the last
+    const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
+    // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
    // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
-    const
-
+    const latestLocalCheckpointNumber =
+      lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber());
+    if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
      // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
-      // but still
-      // We suspect an L1 reorg that added
-      // last
-      // don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
-      const
-
-      (
-        ? await this.
+      // but still haven't reached the pending checkpoint according to the call to the rollup contract.
+      // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
+      // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
+      // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
+      const latestLocalCheckpoint =
+        lastRetrievedCheckpoint ??
+        (latestLocalCheckpointNumber > 0
+          ? await this.getPublishedCheckpoints(latestLocalCheckpointNumber, 1).then(([c]) => c)
          : undefined);
-      const targetL1BlockNumber =
-      const
+      const targetL1BlockNumber = latestLocalCheckpoint?.l1.blockNumber ?? maxBigint(currentL1BlockNumber - 64n, 0n);
+      const latestLocalCheckpointArchive = latestLocalCheckpoint?.checkpoint.archive.root.toString();
      this.log.warn(
-        `Failed to reach
+        `Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` +
          `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`,
        {
-
-
+          latestLocalCheckpointNumber,
+          latestLocalCheckpointArchive,
          blocksSynchedTo,
          currentL1BlockNumber,
          ...status,
@@ -987,9 +1014,9 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
      );
      await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
    } else {
-      this.log.trace(`No new
-
-
+      this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
+        latestLocalCheckpointNumber,
+        pendingCheckpointNumber,
      });
    }
  }
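When the archiver suspects checkpoints were added behind its L1 sync point, it rolls the sync point back to the L1 block of the last checkpoint it knows about, or 64 L1 blocks (two L1 epochs of 32 slots each) when it has none. A small sketch of that choice; maxBigint here is a stand-in for the helper imported from @aztec/foundation/bigint:

const maxBigint = (a: bigint, b: bigint): bigint => (a > b ? a : b);

function rollbackTargetL1Block(currentL1BlockNumber: bigint, lastCheckpointL1Block?: bigint): bigint {
  // Prefer the L1 block where we last saw a checkpoint; otherwise go back two L1 epochs, floored at genesis.
  return lastCheckpointL1Block ?? maxBigint(currentL1BlockNumber - 64n, 0n);
}

// With no known checkpoint and L1 at block 100, the archiver would re-scan from block 36.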
@@ -1049,13 +1076,13 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
    );
  }

-  public getL2EpochNumber(): Promise<
+  public getL2EpochNumber(): Promise<EpochNumber | undefined> {
    return Promise.resolve(
      this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants),
    );
  }

-  public async getBlocksForEpoch(epochNumber:
+  public async getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
    const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
    const blocks: L2Block[] = [];

@@ -1073,7 +1100,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
    return blocks.reverse();
  }

-  public async getBlockHeadersForEpoch(epochNumber:
+  public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
    const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
    const blocks: BlockHeader[] = [];

@@ -1091,7 +1118,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
    return blocks.reverse();
  }

-  public async isEpochComplete(epochNumber:
+  public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
    // The epoch is complete if the current L2 block is the last one in the epoch (or later)
    const header = await this.getBlockHeader('latest');
    const slot = header?.globalVariables.slotNumber.toBigInt();
@@ -1124,6 +1151,77 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
    return this.initialSyncComplete;
  }

+  public async getPublishedCheckpoints(from: number, limit: number, proven?: boolean): Promise<PublishedCheckpoint[]> {
+    const blocks = await this.getPublishedBlocks(from, limit, proven);
+    return blocks.map(b => b.toPublishedCheckpoint());
+  }
+
+  public async getCheckpoints(from: number, limit: number, proven?: boolean): Promise<Checkpoint[]> {
+    const published = await this.getPublishedCheckpoints(from, limit, proven);
+    return published.map(p => p.checkpoint);
+  }
+
+  public async getCheckpoint(number: number): Promise<Checkpoint | undefined> {
+    if (number < 0) {
+      number = await this.getSynchedCheckpointNumber();
+    }
+    if (number === 0) {
+      return undefined;
+    }
+    const published = await this.getPublishedCheckpoints(number, 1);
+    return published[0]?.checkpoint;
+  }
+
+  public async getCheckpointHeader(number: number | 'latest'): Promise<CheckpointHeader | undefined> {
+    if (number === 'latest') {
+      number = await this.getSynchedCheckpointNumber();
+    }
+    if (number === 0) {
+      return undefined;
+    }
+    const checkpoint = await this.getCheckpoint(number);
+    return checkpoint?.header;
+  }
+
+  public getCheckpointNumber(): Promise<number> {
+    return this.getSynchedCheckpointNumber();
+  }
+
+  public getSynchedCheckpointNumber(): Promise<number> {
+    // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
+    return this.store.getSynchedL2BlockNumber();
+  }
+
+  public getProvenCheckpointNumber(): Promise<number> {
+    // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
+    return this.store.getProvenL2BlockNumber();
+  }
+
+  public setProvenCheckpointNumber(checkpointNumber: number): Promise<void> {
+    // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
+    return this.store.setProvenL2BlockNumber(checkpointNumber);
+  }
+
+  public unwindCheckpoints(from: number, checkpointsToUnwind: number): Promise<boolean> {
+    // TODO: This only works if we have one block per checkpoint.
+    return this.store.unwindBlocks(from, checkpointsToUnwind);
+  }
+
+  public getLastBlockNumberInCheckpoint(checkpointNumber: number): Promise<number> {
+    // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
+    return Promise.resolve(checkpointNumber);
+  }
+
+  public addCheckpoints(
+    checkpoints: PublishedCheckpoint[],
+    pendingChainValidationStatus?: ValidateBlockResult,
+  ): Promise<boolean> {
+    return this.store.addBlocks(
+      checkpoints.map(p => PublishedL2Block.fromPublishedCheckpoint(p)),
+      pendingChainValidationStatus,
+    );
+  }
+
  /**
   * Gets up to `limit` amount of L2 blocks starting from `from`.
   * @param from - Number of the first block to return (inclusive).