@aztec/archiver 4.0.0-nightly.20260114 → 4.0.0-nightly.20260115
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/archiver.d.ts +134 -0
- package/dest/archiver.d.ts.map +1 -0
- package/dest/archiver.js +767 -0
- package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
- package/dest/config.d.ts.map +1 -0
- package/dest/{archiver/config.js → config.js} +9 -0
- package/dest/{archiver/errors.d.ts → errors.d.ts} +1 -1
- package/dest/errors.d.ts.map +1 -0
- package/dest/factory.d.ts +5 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +82 -5
- package/dest/index.d.ts +10 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +8 -3
- package/dest/interfaces.d.ts +9 -0
- package/dest/interfaces.d.ts.map +1 -0
- package/dest/interfaces.js +3 -0
- package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
- package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/calldata_retriever.d.ts +2 -2
- package/dest/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/data_retrieval.d.ts +2 -2
- package/dest/l1/data_retrieval.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
- package/dest/l1/debug_tx.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +1 -1
- package/dest/l1/spire_proposer.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
- package/dest/l1/trace_tx.d.ts.map +1 -0
- package/dest/l1/types.d.ts +12 -0
- package/dest/l1/types.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/validate_trace.d.ts +1 -1
- package/dest/l1/validate_trace.d.ts.map +1 -0
- package/dest/modules/data_source_base.d.ts +83 -0
- package/dest/modules/data_source_base.d.ts.map +1 -0
- package/dest/{archiver/archive_source_base.js → modules/data_source_base.js} +109 -10
- package/dest/modules/data_store_updater.d.ts +46 -0
- package/dest/modules/data_store_updater.d.ts.map +1 -0
- package/dest/modules/data_store_updater.js +216 -0
- package/dest/modules/instrumentation.d.ts +37 -0
- package/dest/modules/instrumentation.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.d.ts +67 -0
- package/dest/modules/l1_synchronizer.d.ts.map +1 -0
- package/dest/{archiver/archiver.js → modules/l1_synchronizer.js} +60 -543
- package/dest/{archiver → modules}/validation.d.ts +1 -1
- package/dest/modules/validation.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/block_store.d.ts +1 -1
- package/dest/store/block_store.d.ts.map +1 -0
- package/dest/store/contract_class_store.d.ts +18 -0
- package/dest/store/contract_class_store.d.ts.map +1 -0
- package/dest/store/contract_instance_store.d.ts +24 -0
- package/dest/store/contract_instance_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/kv_archiver_store.d.ts +2 -2
- package/dest/store/kv_archiver_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/log_store.d.ts +1 -1
- package/dest/store/log_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
- package/dest/store/message_store.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
- package/dest/structs/data_retrieval.d.ts.map +1 -0
- package/dest/structs/inbox_message.d.ts +15 -0
- package/dest/structs/inbox_message.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/published.d.ts +1 -1
- package/dest/structs/published.d.ts.map +1 -0
- package/dest/{archiver/test → test}/fake_l1_state.d.ts +1 -1
- package/dest/test/fake_l1_state.d.ts.map +1 -0
- package/dest/test/index.d.ts +2 -1
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +1 -0
- package/dest/test/mock_structs.d.ts +76 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +133 -2
- package/package.json +15 -17
- package/src/archiver.ts +522 -0
- package/src/{archiver/config.ts → config.ts} +11 -0
- package/src/factory.ts +118 -6
- package/src/index.ts +10 -3
- package/src/interfaces.ts +9 -0
- package/src/{archiver/l1 → l1}/calldata_retriever.ts +1 -1
- package/src/{archiver/l1 → l1}/data_retrieval.ts +1 -1
- package/src/{archiver/archive_source_base.ts → modules/data_source_base.ts} +130 -30
- package/src/modules/data_store_updater.ts +318 -0
- package/src/{archiver/archiver.ts → modules/l1_synchronizer.ts} +68 -717
- package/src/test/index.ts +1 -0
- package/src/test/mock_structs.ts +247 -2
- package/dest/archiver/archive_source_base.d.ts +0 -75
- package/dest/archiver/archive_source_base.d.ts.map +0 -1
- package/dest/archiver/archiver.d.ts +0 -168
- package/dest/archiver/archiver.d.ts.map +0 -1
- package/dest/archiver/archiver_store_updates.d.ts +0 -38
- package/dest/archiver/archiver_store_updates.d.ts.map +0 -1
- package/dest/archiver/archiver_store_updates.js +0 -212
- package/dest/archiver/config.d.ts.map +0 -1
- package/dest/archiver/errors.d.ts.map +0 -1
- package/dest/archiver/index.d.ts +0 -8
- package/dest/archiver/index.d.ts.map +0 -1
- package/dest/archiver/index.js +0 -6
- package/dest/archiver/instrumentation.d.ts +0 -37
- package/dest/archiver/instrumentation.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
- package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
- package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
- package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
- package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
- package/dest/archiver/l1/types.d.ts +0 -12
- package/dest/archiver/l1/types.d.ts.map +0 -1
- package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
- package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/structs/inbox_message.d.ts +0 -15
- package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
- package/dest/archiver/structs/published.d.ts.map +0 -1
- package/dest/archiver/test/fake_l1_state.d.ts.map +0 -1
- package/dest/archiver/validation.d.ts.map +0 -1
- package/dest/rpc/index.d.ts +0 -9
- package/dest/rpc/index.d.ts.map +0 -1
- package/dest/rpc/index.js +0 -15
- package/src/archiver/archiver_store_updates.ts +0 -321
- package/src/archiver/index.ts +0 -7
- package/src/rpc/index.ts +0 -16
- /package/dest/{archiver/errors.js → errors.js} +0 -0
- /package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +0 -0
- /package/dest/{archiver/l1 → l1}/calldata_retriever.js +0 -0
- /package/dest/{archiver/l1 → l1}/data_retrieval.js +0 -0
- /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/spire_proposer.js +0 -0
- /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/types.js +0 -0
- /package/dest/{archiver/l1 → l1}/validate_trace.js +0 -0
- /package/dest/{archiver → modules}/instrumentation.js +0 -0
- /package/dest/{archiver → modules}/validation.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/block_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/kv_archiver_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/log_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/message_store.js +0 -0
- /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
- /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
- /package/dest/{archiver/structs → structs}/published.js +0 -0
- /package/dest/{archiver/test → test}/fake_l1_state.js +0 -0
- /package/src/{archiver/errors.ts → errors.ts} +0 -0
- /package/src/{archiver/l1 → l1}/README.md +0 -0
- /package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +0 -0
- /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/spire_proposer.ts +0 -0
- /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/types.ts +0 -0
- /package/src/{archiver/l1 → l1}/validate_trace.ts +0 -0
- /package/src/{archiver → modules}/instrumentation.ts +0 -0
- /package/src/{archiver → modules}/validation.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/block_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/log_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/message_store.ts +0 -0
- /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
- /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
- /package/src/{archiver/structs → structs}/published.ts +0 -0
- /package/src/{archiver/test → test}/fake_l1_state.ts +0 -0
package/src/{archiver/archiver.ts → modules/l1_synchronizer.ts} +68 -717

@@ -1,159 +1,59 @@
 import type { BlobClientInterface } from '@aztec/blob-client/client';
-import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
 import { EpochCache } from '@aztec/epoch-cache';
-import {
-import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
+import { InboxContract, RollupContract } from '@aztec/ethereum/contracts';
 import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
 import type { L1BlockId } from '@aztec/ethereum/l1-types';
 import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
 import { maxBigint } from '@aztec/foundation/bigint';
-import { BlockNumber, CheckpointNumber, EpochNumber
-import {
-import {
+import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
+import { Buffer32 } from '@aztec/foundation/buffer';
+import { pick } from '@aztec/foundation/collection';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { type Logger, createLogger } from '@aztec/foundation/log';
-import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise';
-import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
 import { count } from '@aztec/foundation/string';
 import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
 import { isDefined } from '@aztec/foundation/types';
-import {
-
-
-  GENESIS_CHECKPOINT_HEADER_HASH,
-  L2Block,
-  L2BlockNew,
-  type L2BlockSink,
-  type L2BlockSource,
-  L2BlockSourceEvents,
-  type L2Tips,
-} from '@aztec/stdlib/block';
-import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
-import type { ContractDataSource } from '@aztec/stdlib/contract';
-import {
-  type L1RollupConstants,
-  getEpochAtSlot,
-  getEpochNumberAtTimestamp,
-  getSlotAtTimestamp,
-  getSlotRangeForEpoch,
-  getTimestampRangeForEpoch,
-} from '@aztec/stdlib/epoch-helpers';
-import type { L2LogsSource } from '@aztec/stdlib/interfaces/server';
-import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging';
+import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResult } from '@aztec/stdlib/block';
+import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
+import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
 import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
-import type
-
-import {
-  type TelemetryClient,
-  type Traceable,
-  type Tracer,
-  execInSpan,
-  getTelemetryClient,
-  trackSpan,
-} from '@aztec/telemetry-client';
-
-import { EventEmitter } from 'events';
-import { type Hex, createPublicClient, fallback, http } from 'viem';
-
-import { ArchiveSourceBase } from './archive_source_base.js';
-import {
-  addBlocksWithContractData,
-  addCheckpointsWithContractData,
-  unwindCheckpointsWithContractData,
-} from './archiver_store_updates.js';
-import type { ArchiverConfig } from './config.js';
-import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
-import { ArchiverInstrumentation } from './instrumentation.js';
-import type { CheckpointData } from './kv_archiver_store/block_store.js';
-import type { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js';
+import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client';
+
+import { InitialCheckpointNumberNotSequentialError } from '../errors.js';
 import {
   retrieveCheckpointsFromRollup,
   retrieveL1ToL2Message,
   retrieveL1ToL2Messages,
   retrievedToPublishedCheckpoint,
-} from '
-import {
-import type { InboxMessage } from '
-import {
-
-
- * Helper interface to combine all sources this archiver implementation provides.
- */
-export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource;
-
-/** Request to add a block to the archiver, queued for processing by the sync loop. */
-type AddBlockRequest = {
-  block: L2BlockNew;
-  resolve: () => void;
-  reject: (err: Error) => void;
-};
-
-export type ArchiverDeps = {
-  telemetry?: TelemetryClient;
-  blobClient: BlobClientInterface;
-  epochCache?: EpochCache;
-  dateProvider?: DateProvider;
-};
-
-function mapArchiverConfig(config: Partial<ArchiverConfig>) {
-  return {
-    pollingIntervalMs: config.archiverPollingIntervalMS,
-    batchSize: config.archiverBatchSize,
-    skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations,
-    maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
-    ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts,
-  };
-}
+} from '../l1/data_retrieval.js';
+import type { KVArchiverDataStore } from '../store/kv_archiver_store.js';
+import type { InboxMessage } from '../structs/inbox_message.js';
+import { ArchiverDataStoreUpdater } from './data_store_updater.js';
+import type { ArchiverInstrumentation } from './instrumentation.js';
+import { validateCheckpointAttestations } from './validation.js';

 type RollupStatus = {
   provenCheckpointNumber: CheckpointNumber;
-  provenArchive:
+  provenArchive: string;
   pendingCheckpointNumber: CheckpointNumber;
-  pendingArchive:
+  pendingArchive: string;
   validationResult: ValidateCheckpointResult | undefined;
   lastRetrievedCheckpoint?: PublishedCheckpoint;
   lastL1BlockWithCheckpoint?: bigint;
 };

 /**
- *
- * Responsible for
- * concern themselves with it.
+ * Handles L1 synchronization for the archiver.
+ * Responsible for fetching checkpoints, L1→L2 messages, and handling L1 reorgs.
  */
-export class
-  /** Event emitter for archiver events (L2BlockProven, L2PruneDetected, etc). */
-  public readonly events: ArchiverEmitter = new EventEmitter() as ArchiverEmitter;
-
-  /** A loop in which we will be continually fetching new checkpoints. */
-  private runningPromise: RunningPromise;
-
+export class ArchiverL1Synchronizer implements Traceable {
   private l1BlockNumber: bigint | undefined;
   private l1Timestamp: bigint | undefined;
-  private initialSyncComplete: boolean = false;
-  private initialSyncPromise: PromiseWithResolvers<void>;
-
-  /** Queue of blocks to be added to the store, processed by the sync loop. */
-  private blockQueue: AddBlockRequest[] = [];

+  private readonly updater: ArchiverDataStoreUpdater;
   public readonly tracer: Tracer;

-  /**
-   * Creates a new instance of the Archiver.
-   * @param publicClient - A client for interacting with the Ethereum node.
-   * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
-   * @param rollup - Rollup contract instance.
-   * @param inbox - Inbox contract instance.
-   * @param l1Addresses - L1 contract addresses (registry, governance proposer, slash factory, slashing proposer).
-   * @param dataStore - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
-   * @param config - Archiver configuration options.
-   * @param blobClient - Client for retrieving blob data.
-   * @param epochCache - Cache for epoch-related data.
-   * @param dateProvider - Provider for current date/time.
-   * @param instrumentation - Instrumentation for metrics and tracing.
-   * @param l1constants - L1 rollup constants.
-   * @param log - A logger.
-   */
   constructor(
     private readonly publicClient: ViemPublicClient,
     private readonly debugClient: ViemPublicDebugClient,
@@ -163,213 +63,37 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
       L1ContractAddresses,
       'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
     > & { slashingProposerAddress: EthAddress },
-    readonly
-    private config: {
-      pollingIntervalMs: number;
+    private readonly store: KVArchiverDataStore,
+    private readonly config: {
       batchSize: number;
       skipValidateCheckpointAttestations?: boolean;
       maxAllowedEthClientDriftSeconds: number;
-      ethereumAllowNoDebugHosts?: boolean;
     },
     private readonly blobClient: BlobClientInterface,
     private readonly epochCache: EpochCache,
     private readonly dateProvider: DateProvider,
     private readonly instrumentation: ArchiverInstrumentation,
     private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
-    private readonly
+    private readonly events: ArchiverEmitter,
+    tracer: Tracer,
+    private readonly log: Logger = createLogger('archiver:l1-sync'),
   ) {
-
-
-    this.tracer = instrumentation.tracer;
-    this.initialSyncPromise = promiseWithResolvers();
-
-    // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
-    // are done as fast as possible. This then gets updated once the initial sync completes.
-    this.runningPromise = new RunningPromise(
-      () => this.sync(),
-      this.log,
-      this.config.pollingIntervalMs / 10,
-      makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError),
-    );
+    this.updater = new ArchiverDataStoreUpdater(this.store);
+    this.tracer = tracer;
   }

-  /**
-
-
-   * @param archiverStore - The backing store for the archiver.
-   * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
-   * @returns - An instance of the archiver.
-   */
-  public static async createAndSync(
-    config: ArchiverConfig,
-    archiverStore: KVArchiverDataStore,
-    deps: ArchiverDeps,
-    blockUntilSynced = true,
-  ): Promise<Archiver> {
-    const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
-    const publicClient = createPublicClient({
-      chain: chain.chainInfo,
-      transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))),
-      pollingInterval: config.viemPollingIntervalMS,
-    });
-
-    // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
-    const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
-    const debugClient = createPublicClient({
-      chain: chain.chainInfo,
-      transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))),
-      pollingInterval: config.viemPollingIntervalMS,
-    }) as ViemPublicDebugClient;
-
-    const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
-    const inbox = new InboxContract(publicClient, config.l1Contracts.inboxAddress);
-
-    const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
-      await Promise.all([
-        rollup.getL1StartBlock(),
-        rollup.getL1GenesisTime(),
-        rollup.getProofSubmissionEpochs(),
-        rollup.getGenesisArchiveTreeRoot(),
-        rollup.getSlashingProposerAddress(),
-      ] as const);
-
-    const l1StartBlockHash = await publicClient
-      .getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
-      .then(block => Buffer32.fromString(block.hash));
-
-    const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config;
-
-    const l1Constants = {
-      l1StartBlockHash,
-      l1StartBlock,
-      l1GenesisTime,
-      epochDuration,
-      slotDuration,
-      ethereumSlotDuration,
-      proofSubmissionEpochs: Number(proofSubmissionEpochs),
-      genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
-    };
-
-    const opts = merge(
-      {
-        pollingIntervalMs: 10_000,
-        batchSize: 100,
-        maxAllowedEthClientDriftSeconds: 300,
-        ethereumAllowNoDebugHosts: false,
-      },
-      mapArchiverConfig(config),
-    );
-
-    const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps));
-    const telemetry = deps.telemetry ?? getTelemetryClient();
-
-    const archiver = new Archiver(
-      publicClient,
-      debugClient,
-      rollup,
-      inbox,
-      { ...config.l1Contracts, slashingProposerAddress },
-      archiverStore,
-      opts,
-      deps.blobClient,
-      epochCache,
-      deps.dateProvider ?? new DateProvider(),
-      await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()),
-      l1Constants,
-    );
-    await archiver.start(blockUntilSynced);
-    return archiver;
-  }
-
-  /** Updates archiver config */
-  public updateConfig(newConfig: Partial<ArchiverConfig>) {
-    this.config = merge(this.config, mapArchiverConfig(newConfig));
-  }
-
-  /**
-   * Starts sync process.
-   * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
-   */
-  public async start(blockUntilSynced: boolean): Promise<void> {
-    if (this.runningPromise.isRunning()) {
-      throw new Error('Archiver is already running');
-    }
-
-    await this.blobClient.testSources();
-    await this.testEthereumNodeSynced();
-    await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
-
-    // Log initial state for the archiver
-    const { l1StartBlock } = this.l1constants;
-    const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
-    const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
-    this.log.info(
-      `Starting archiver sync to rollup contract ${this.rollup.address} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`,
-      { blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint },
-    );
-
-    // Start sync loop, and return the wait for initial sync if we are asked to block until synced
-    this.runningPromise.start();
-    if (blockUntilSynced) {
-      return this.waitForInitialSync();
-    }
-  }
-
-  public syncImmediate() {
-    return this.runningPromise.trigger();
-  }
-
-  /**
-   * Queues a block to be added to the archiver store and triggers processing.
-   * The block will be processed by the sync loop.
-   * Implements the L2BlockSink interface.
-   * @param block - The L2 block to add.
-   * @returns A promise that resolves when the block has been added to the store, or rejects on error.
-   */
-  public addBlock(block: L2BlockNew): Promise<void> {
-    return new Promise<void>((resolve, reject) => {
-      this.blockQueue.push({ block, resolve, reject });
-      this.log.debug(`Queued block ${block.number} for processing`);
-      // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
-      this.syncImmediate().catch(err => {
-        this.log.error(`Sync immediate call failed: ${err}`);
-      });
-    });
-  }
-
-  /**
-   * Processes all queued blocks, adding them to the store.
-   * Called at the beginning of each sync iteration.
-   * Blocks are processed in the order they were queued.
-   */
-  private async processQueuedBlocks(): Promise<void> {
-    if (this.blockQueue.length === 0) {
-      return;
-    }
-
-    // Take all blocks from the queue
-    const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
-    this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
-
-    // Process each block individually to properly resolve/reject each promise
-    for (const { block, resolve, reject } of queuedItems) {
-      try {
-        await addBlocksWithContractData(this.store, [block]);
-        this.log.debug(`Added block ${block.number} to store`);
-        resolve();
-      } catch (err: any) {
-        this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
-        reject(err);
-      }
-    }
+  /** Returns the last L1 block number that was synced. */
+  public getL1BlockNumber(): bigint | undefined {
+    return this.l1BlockNumber;
   }

-
-
+  /** Returns the last L1 timestamp that was synced. */
+  public getL1Timestamp(): bigint | undefined {
+    return this.l1Timestamp;
   }

   /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */
-
+  public async testEthereumNodeSynced(): Promise<void> {
     const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
     if (maxAllowedDelay === 0) {
       return;
@@ -384,7 +108,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
   }

   @trackSpan('Archiver.syncFromL1')
-
+  public async syncFromL1(initialSyncComplete: boolean): Promise<void> {
     /**
      * We keep track of three "pointers" to L1 blocks:
      * 1. the last L1 block that published an L2 block
@@ -454,7 +178,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     if (currentL1BlockNumber > blocksSynchedTo) {
       // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
       // pending chain validation status, proven checkpoint number, and synched L1 block number.
-      const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
+      const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete);
       // Then we prune the current epoch if it'd reorg on next submission.
       // Note that we don't do this before retrieving checkpoints because we may need to retrieve
       // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
@@ -496,34 +220,10 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
       l1TimestampAtStart: currentL1Timestamp,
       l1BlockNumberAtEnd,
     });
-
-    // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
-    // so if the initial sync took too long, we still go for another iteration.
-    if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= l1BlockNumberAtEnd) {
-      this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
-        l1BlockNumber: currentL1BlockNumber,
-        syncPoint: await this.store.getSynchPoint(),
-        ...(await this.getL2Tips()),
-      });
-      this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
-      this.initialSyncComplete = true;
-      this.initialSyncPromise.resolve();
-    }
-  }
-
-  /**
-   * Fetches logs from L1 contracts and processes them.
-   */
-  @trackSpan('Archiver.sync')
-  private async sync() {
-    // Process any queued blocks first, before doing L1 sync
-    await this.processQueuedBlocks();
-    // Now perform L1 sync
-    await this.syncFromL1();
   }

   /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
-  private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint) {
+  private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint): Promise<boolean> {
     const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
     const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber });
     if (result) {
@@ -542,9 +242,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     provenCheckpointNumber: CheckpointNumber,
     currentL1BlockNumber: bigint,
     currentL1Timestamp: bigint,
-  ) {
+  ): Promise<{ rollupCanPrune: boolean }> {
     const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
-    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+    const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber();
     const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;

     if (canPrune) {
@@ -583,11 +283,11 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
       this.log.debug(
        `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
       );
-      await this.
+      await this.updater.unwindCheckpointsWithContractData(localPendingCheckpointNumber, checkpointsToUnwind);
       this.log.warn(
         `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
           `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
-          `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
+          `Updated latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`,
       );
       this.instrumentation.processPrune(timer.ms());
       // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
@@ -613,7 +313,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     messagesSyncPoint: L1BlockId,
     currentL1BlockNumber: bigint,
     _currentL1BlockHash: Buffer32,
-  ) {
+  ): Promise<void> {
     this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
     if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
       return;
@@ -633,7 +333,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     // Compare message count and rolling hash. If they match, no need to retrieve anything.
     if (
       remoteMessagesState.totalMessagesInserted === localMessagesInserted &&
-      remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ??
+      remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer32.ZERO)
     ) {
       this.log.trace(
         `No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`,
@@ -721,7 +421,10 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     return undefined;
   }

-  private async rollbackL1ToL2Messages(
+  private async rollbackL1ToL2Messages(
+    localLastMessage: InboxMessage,
+    messagesSyncPoint: L1BlockId,
+  ): Promise<L1BlockId> {
     // Slowly go back through our messages until we find the last common message.
     // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this
     // is a very rare case, so it's fine to query one log at a time.
@@ -768,8 +471,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
   }

   @trackSpan('Archiver.handleCheckpoints')
-  private async handleCheckpoints(
-
+  private async handleCheckpoints(
+    blocksSynchedTo: bigint,
+    currentL1BlockNumber: bigint,
+    initialSyncComplete: boolean,
+  ): Promise<RollupStatus> {
+    const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber();
     const initialValidationResult: ValidateCheckpointResult | undefined =
       await this.store.getPendingChainValidationStatus();
     const {
@@ -801,9 +508,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
     // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
     if (provenCheckpointNumber === 0) {
-      const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+      const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber();
       if (localProvenCheckpointNumber !== provenCheckpointNumber) {
-        await this.setProvenCheckpointNumber(provenCheckpointNumber);
+        await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
         this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
       }
     }
@@ -813,7 +520,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl

     // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
     // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
-    const synched = await this.getSynchedCheckpointNumber();
+    const synched = await this.store.getSynchedCheckpointNumber();
     if (
       localCheckpointForDestinationProvenCheckpointNumber &&
       synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber
@@ -833,9 +540,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
       localCheckpointForDestinationProvenCheckpointNumber &&
       provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)
     ) {
-      const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+      const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber();
       if (localProvenCheckpointNumber !== provenCheckpointNumber) {
-        await this.setProvenCheckpointNumber(provenCheckpointNumber);
+        await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
         this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
         const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
         const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
@@ -928,12 +635,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
      }

      const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
-      await this.
+      await this.updater.unwindCheckpointsWithContractData(localPendingCheckpointNumber, checkpointsToUnwind);

      this.log.warn(
        `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
          `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` +
-          `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
+          `Updated L2 latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`,
      );
    }
  }
@@ -962,7 +669,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
        this.l1Addresses,
        this.instrumentation,
        this.log,
-        !
+        !initialSyncComplete, // isHistoricalSync
      ),
    );

@@ -1026,7 +733,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
      // Check the inHash of the checkpoint against the l1->l2 messages.
      // The messages should've been synced up to the currentL1BlockNumber and must be available for the published
      // checkpoints we just retrieved.
-      const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
+      const l1ToL2Messages = await this.store.getL1ToL2Messages(published.checkpoint.number);
      const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
      const publishedInHash = published.checkpoint.header.inHash;
      if (!computedInHash.equals(publishedInHash)) {
@@ -1059,7 +766,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
      rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
    const [processDuration] = await elapsed(() =>
      execInSpan(this.tracer, 'Archiver.addCheckpoints', () =>
-        this.
+        this.updater.addCheckpointsWithContractData(validCheckpoints, updatedValidationResult),
      ),
    );
    this.instrumentation.processNewBlocks(
@@ -1111,12 +818,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
    status: RollupStatus,
    blocksSynchedTo: bigint,
    currentL1BlockNumber: bigint,
-  ) {
+  ): Promise<void> {
    const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
    // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
    // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
    const latestLocalCheckpointNumber =
-      lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber());
+      lastRetrievedCheckpoint?.checkpoint.number ?? (await this.store.getSynchedCheckpointNumber());
    if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
      // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
      // but still haven't reached the pending checkpoint according to the call to the rollup contract.
@@ -1153,367 +860,11 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl
     }
   }

-
-  public resume() {
-    if (this.runningPromise.isRunning()) {
-      this.log.warn(`Archiver already running`);
-    }
-    this.log.info(`Restarting archiver`);
-    this.runningPromise.start();
-  }
-
-  /**
-   * Stops the archiver.
-   * @returns A promise signalling completion of the stop process.
-   */
-  public async stop(): Promise<void> {
-    this.log.debug('Stopping...');
-    await this.runningPromise.stop();
-
-    this.log.info('Stopped.');
-    return Promise.resolve();
-  }
-
-  public backupTo(destPath: string): Promise<string> {
-    return this.dataStore.backupTo(destPath);
-  }
-
-  public getL1Constants(): Promise<L1RollupConstants> {
-    return Promise.resolve(this.l1constants);
-  }
-
-  public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> {
-    return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot });
-  }
-
-  public getRollupAddress(): Promise<EthAddress> {
-    return Promise.resolve(EthAddress.fromString(this.rollup.address));
-  }
-
-  public getRegistryAddress(): Promise<EthAddress> {
-    return Promise.resolve(this.l1Addresses.registryAddress);
-  }
-
-  public getL1BlockNumber(): bigint | undefined {
-    return this.l1BlockNumber;
-  }
-
-  public getL1Timestamp(): Promise<bigint | undefined> {
-    return Promise.resolve(this.l1Timestamp);
-  }
-
-  public getL2SlotNumber(): Promise<SlotNumber | undefined> {
-    return Promise.resolve(
-      this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants),
-    );
-  }
-
-  public getL2EpochNumber(): Promise<EpochNumber | undefined> {
-    return Promise.resolve(
-      this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants),
-    );
-  }
-
-  public async getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
-    const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
-    const blocks: L2Block[] = [];
-
-    // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
-    // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
-    let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
-    const slot = (b: CheckpointData) => b.header.slotNumber;
-    while (checkpoint && slot(checkpoint) >= start) {
-      if (slot(checkpoint) <= end) {
-        // push the blocks on backwards
-        const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
-        for (let i = endBlock; i >= checkpoint.startBlock; i--) {
-          const block = await this.getBlock(BlockNumber(i));
-          if (block) {
-            blocks.push(block);
-          }
-        }
-      }
-      checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
-    }
-
-    return blocks.reverse();
-  }
-
-  public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
-    const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
-    const blocks: BlockHeader[] = [];
-
-    // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
-    // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
-    let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
-    const slot = (b: CheckpointData) => b.header.slotNumber;
-    while (checkpoint && slot(checkpoint) >= start) {
-      if (slot(checkpoint) <= end) {
-        // push the blocks on backwards
-        const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
-        for (let i = endBlock; i >= checkpoint.startBlock; i--) {
-          const block = await this.getBlockHeader(BlockNumber(i));
-          if (block) {
-            blocks.push(block);
-          }
-        }
-      }
-      checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
-    }
-    return blocks.reverse();
-  }
-
-  public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
-    // The epoch is complete if the current L2 block is the last one in the epoch (or later)
-    const header = await this.getBlockHeader('latest');
-    const slot = header ? header.globalVariables.slotNumber : undefined;
-    const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
-    if (slot && slot >= endSlot) {
-      return true;
-    }
-
-    // If we haven't run an initial sync, just return false.
-    const l1Timestamp = this.l1Timestamp;
-    if (l1Timestamp === undefined) {
-      return false;
-    }
-
-    // If not, the epoch may also be complete if the L2 slot has passed without a block
-    // We compute this based on the end timestamp for the given epoch and the timestamp of the last L1 block
-    const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants);
-
-    // For this computation, we throw in a few extra seconds just for good measure,
-    // since we know the next L1 block won't be mined within this range. Remember that
-    // l1timestamp is the timestamp of the last l1 block we've seen, so this relies on
-    // the fact that L1 won't mine two blocks within this time of each other.
-    // TODO(palla/reorg): Is the above a safe assumption?
-    const leeway = 1n;
-    return l1Timestamp + leeway >= endTimestamp;
-  }
-
-  /** Returns whether the archiver has completed an initial sync run successfully. */
-  public isInitialSyncComplete(): boolean {
-    return this.initialSyncComplete;
-  }
-
-  public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise<CheckpointHeader | undefined> {
-    if (number === 'latest') {
-      number = await this.getSynchedCheckpointNumber();
-    }
-    if (number === 0) {
-      return undefined;
-    }
+  private async getCheckpointHeader(number: CheckpointNumber) {
     const checkpoint = await this.store.getCheckpointData(number);
     if (!checkpoint) {
       return undefined;
     }
     return checkpoint.header;
   }
-
-  public getCheckpointNumber(): Promise<CheckpointNumber> {
-    return this.getSynchedCheckpointNumber();
-  }
-
-  public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
-    return this.store.getSynchedCheckpointNumber();
-  }
-
-  public getProvenCheckpointNumber(): Promise<CheckpointNumber> {
-    return this.store.getProvenCheckpointNumber();
-  }
-
-  public setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
-    return this.store.setProvenCheckpointNumber(checkpointNumber);
-  }
-
-  public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
-    return unwindCheckpointsWithContractData(this.store, from, checkpointsToUnwind);
-  }
-
-  public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise<BlockNumber | undefined> {
-    const checkpointData = await this.store.getCheckpointData(checkpointNumber);
-    if (!checkpointData) {
-      return undefined;
-    }
-    return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
-  }
-
-  public addCheckpoints(
-    checkpoints: PublishedCheckpoint[],
-    pendingChainValidationStatus?: ValidateCheckpointResult,
-  ): Promise<boolean> {
-    return addCheckpointsWithContractData(this.store, checkpoints, pendingChainValidationStatus);
-  }
-
-  getCheckpointedBlockNumber(): Promise<BlockNumber> {
-    return this.store.getCheckpointedL2BlockNumber();
-  }
-
-  public async getL2Tips(): Promise<L2Tips> {
-    const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([
-      this.getBlockNumber(),
-      this.getProvenBlockNumber(),
-      this.getCheckpointedBlockNumber(),
-    ] as const);
-
-    // TODO(#13569): Compute proper finalized block number based on L1 finalized block.
-    // We just force it 2 epochs worth of proven data for now.
-    // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
-    const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
-
-    const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
-
-    // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
-    const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
-      await Promise.all([
-        latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
-        provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
-        finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
-        checkpointedBlockNumber > beforeInitialblockNumber
-          ? this.getCheckpointedBlock(checkpointedBlockNumber)
-          : undefined,
-      ] as const);
-
-    if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
-      throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
-    }
-
-    // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
-    if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
-      throw new Error(
-        `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
-      );
-    }
-
-    if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
-      throw new Error(
-        `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
-      );
-    }
-
-    if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
-      throw new Error(
-        `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
-      );
-    }
-
-    const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
-    const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
-    const finalizedBlockHeaderHash =
-      (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
-    const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
-
-    // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
-    const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
-      provenCheckpointedBlock !== undefined
-        ? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
-        : [undefined],
-      finalizedCheckpointedBlock !== undefined
-        ? await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
-        : [undefined],
-      checkpointedBlock !== undefined
-        ? await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1)
-        : [undefined],
-    ]);
-
-    const initialcheckpointId: CheckpointId = {
-      number: CheckpointNumber.ZERO,
-      hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
-    };
-
-    const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
-      if (checkpoint === undefined) {
-        return initialcheckpointId;
-      }
-      return {
-        number: checkpoint.checkpoint.number,
-        hash: checkpoint.checkpoint.hash().toString(),
-      };
-    };
-
-    const l2Tips: L2Tips = {
-      proposed: {
-        number: latestBlockNumber,
-        hash: latestBlockHeaderHash.toString(),
-      },
-      proven: {
-        block: {
-          number: provenBlockNumber,
-          hash: provenBlockHeaderHash.toString(),
-        },
-        checkpoint: makeCheckpointId(provenBlockCheckpoint),
-      },
-      finalized: {
-        block: {
-          number: finalizedBlockNumber,
-          hash: finalizedBlockHeaderHash.toString(),
-        },
-        checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
-      },
-      checkpointed: {
-        block: {
-          number: checkpointedBlockNumber,
-          hash: checkpointedBlockHeaderHash.toString(),
-        },
-        checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
-      },
-    };
-
-    return l2Tips;
-  }
-
-  public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
-    // TODO(pw/mbps): This still assumes 1 block per checkpoint
-    const currentBlocks = await this.getL2Tips();
-    const currentL2Block = currentBlocks.proposed.number;
-    const currentProvenBlock = currentBlocks.proven.block.number;
-
-    if (targetL2BlockNumber >= currentL2Block) {
-      throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
-    }
-    const blocksToUnwind = currentL2Block - targetL2BlockNumber;
-    const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
-    if (!targetL2Block) {
-      throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
-    }
-    const targetL1BlockNumber = targetL2Block.l1.blockNumber;
-    const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
-    const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
-    this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
-    await unwindCheckpointsWithContractData(this.store, CheckpointNumber(currentL2Block), blocksToUnwind);
-    this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
-    await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
-    this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
-    await this.store.setCheckpointSynchedL1BlockNumber(targetL1BlockNumber);
-    await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
-    if (targetL2BlockNumber < currentProvenBlock) {
-      this.log.info(`Clearing proven L2 block number`);
-      await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
-    }
-    // TODO(palla/reorg): Set the finalized block when we add support for it.
-    // if (targetL2BlockNumber < currentFinalizedBlock) {
-    //   this.log.info(`Clearing finalized L2 block number`);
-    //   await this.store.setFinalizedL2BlockNumber(0);
-    // }
-  }
-
-  public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise<Checkpoint[]> {
-    const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
-    const checkpoints: Checkpoint[] = [];
-
-    // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
-    // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
-    let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
-    const slot = (b: CheckpointData) => b.header.slotNumber;
-    while (checkpointData && slot(checkpointData) >= start) {
-      if (slot(checkpointData) <= end) {
-        // push the checkpoints on backwards
-        const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
-        checkpoints.push(checkpoint.checkpoint);
-      }
-      checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
-    }
-
-    return checkpoints.reverse();
-  }
 }