@aztec/archiver 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -6
- package/dest/archiver/archiver.d.ts +127 -84
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +1128 -380
- package/dest/archiver/archiver_store.d.ts +122 -45
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +2013 -343
- package/dest/archiver/config.d.ts +7 -20
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +21 -5
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/instrumentation.d.ts +5 -3
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/instrumentation.js +14 -0
- package/dest/archiver/kv_archiver_store/block_store.d.ts +83 -15
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +396 -73
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +51 -55
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +82 -46
- package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +149 -84
- package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.js +15 -14
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
- package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/archiver/l1/calldata_retriever.js +471 -0
- package/dest/archiver/l1/data_retrieval.d.ts +90 -0
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
- package/dest/archiver/l1/data_retrieval.js +331 -0
- package/dest/archiver/l1/debug_tx.d.ts +19 -0
- package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
- package/dest/archiver/l1/debug_tx.js +73 -0
- package/dest/archiver/l1/spire_proposer.d.ts +70 -0
- package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
- package/dest/archiver/l1/spire_proposer.js +157 -0
- package/dest/archiver/l1/trace_tx.d.ts +97 -0
- package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
- package/dest/archiver/l1/trace_tx.js +91 -0
- package/dest/archiver/l1/types.d.ts +12 -0
- package/dest/archiver/l1/types.d.ts.map +1 -0
- package/dest/archiver/l1/types.js +3 -0
- package/dest/archiver/l1/validate_trace.d.ts +29 -0
- package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
- package/dest/archiver/l1/validate_trace.js +150 -0
- package/dest/archiver/structs/data_retrieval.d.ts +1 -1
- package/dest/archiver/structs/inbox_message.d.ts +4 -4
- package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
- package/dest/archiver/structs/inbox_message.js +6 -5
- package/dest/archiver/structs/published.d.ts +2 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/archiver/validation.d.ts +10 -4
- package/dest/archiver/validation.d.ts.map +1 -1
- package/dest/archiver/validation.js +66 -44
- package/dest/factory.d.ts +4 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +5 -4
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/rpc/index.d.ts +2 -2
- package/dest/test/index.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts +16 -8
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +19 -14
- package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +10 -9
- package/dest/test/mock_l2_block_source.d.ts +31 -20
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +85 -18
- package/dest/test/mock_structs.d.ts +3 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +9 -8
- package/package.json +18 -17
- package/src/archiver/archiver.ts +971 -475
- package/src/archiver/archiver_store.ts +141 -44
- package/src/archiver/archiver_store_test_suite.ts +2114 -331
- package/src/archiver/config.ts +30 -35
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/instrumentation.ts +19 -2
- package/src/archiver/kv_archiver_store/block_store.ts +541 -83
- package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
- package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +107 -67
- package/src/archiver/kv_archiver_store/log_store.ts +209 -99
- package/src/archiver/kv_archiver_store/message_store.ts +21 -18
- package/src/archiver/l1/README.md +98 -0
- package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
- package/src/archiver/l1/calldata_retriever.ts +641 -0
- package/src/archiver/l1/data_retrieval.ts +512 -0
- package/src/archiver/l1/debug_tx.ts +99 -0
- package/src/archiver/l1/spire_proposer.ts +160 -0
- package/src/archiver/l1/trace_tx.ts +128 -0
- package/src/archiver/l1/types.ts +13 -0
- package/src/archiver/l1/validate_trace.ts +211 -0
- package/src/archiver/structs/inbox_message.ts +8 -8
- package/src/archiver/structs/published.ts +1 -1
- package/src/archiver/validation.ts +86 -32
- package/src/factory.ts +6 -7
- package/src/index.ts +1 -1
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/mock_archiver.ts +22 -16
- package/src/test/mock_l1_to_l2_message_source.ts +10 -9
- package/src/test/mock_l2_block_source.ts +114 -27
- package/src/test/mock_structs.ts +10 -9
- package/dest/archiver/data_retrieval.d.ts +0 -78
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.js +0 -354
- package/src/archiver/data_retrieval.ts +0 -535
package/src/archiver/archiver.ts
CHANGED
@@ -1,23 +1,23 @@
-import type {
+import type { BlobClientInterface } from '@aztec/blob-client/client';
+import { GENESIS_BLOCK_HEADER_HASH } from '@aztec/constants';
 import { EpochCache } from '@aztec/epoch-cache';
-import {
-
-
-
-
-  type ViemPublicClient,
-  createEthereumChain,
-} from '@aztec/ethereum';
+import { createEthereumChain } from '@aztec/ethereum/chain';
+import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
+import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
+import type { L1BlockId } from '@aztec/ethereum/l1-types';
+import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
 import { maxBigint } from '@aztec/foundation/bigint';
+import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
 import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
-import { pick } from '@aztec/foundation/collection';
+import { merge, pick } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/foundation/eth-address';
-import { Fr } from '@aztec/foundation/fields';
 import { type Logger, createLogger } from '@aztec/foundation/log';
+import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise';
 import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
-import { sleep } from '@aztec/foundation/sleep';
 import { count } from '@aztec/foundation/string';
 import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
+import { isDefined } from '@aztec/foundation/types';
 import type { CustomRange } from '@aztec/kv-store';
 import { RollupAbi } from '@aztec/l1-artifacts';
 import {
@@ -33,12 +33,17 @@ import type { FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import {
   type ArchiverEmitter,
-
-
+  CheckpointedL2Block,
+  CommitteeAttestation,
+  L2Block,
+  L2BlockNew,
+  type L2BlockSink,
   type L2BlockSource,
   L2BlockSourceEvents,
   type L2Tips,
+  PublishedL2Block,
 } from '@aztec/stdlib/block';
+import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import {
   type ContractClassPublic,
   type ContractDataSource,
@@ -59,12 +64,20 @@ import {
 } from '@aztec/stdlib/epoch-helpers';
 import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
 import type { L2LogsSource } from '@aztec/stdlib/interfaces/server';
-import {
-
+import {
+  ContractClassLog,
+  type LogFilter,
+  type PrivateLog,
+  type PublicLog,
+  type SiloedTag,
+  Tag,
+  TxScopedL2Log,
+} from '@aztec/stdlib/logs';
+import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
+import type { CheckpointHeader } from '@aztec/stdlib/rollup';
 import { type BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx';
 import type { UInt64 } from '@aztec/stdlib/types';
 import {
-  Attributes,
   type TelemetryClient,
   type Traceable,
   type Tracer,
@@ -74,44 +87,73 @@ import {
 
 import { EventEmitter } from 'events';
 import groupBy from 'lodash.groupby';
-import { type GetContractReturnType, createPublicClient, fallback, http } from 'viem';
+import { type GetContractReturnType, type Hex, createPublicClient, fallback, http } from 'viem';
 
 import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
 import type { ArchiverConfig } from './config.js';
+import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
+import { ArchiverInstrumentation } from './instrumentation.js';
+import type { CheckpointData } from './kv_archiver_store/block_store.js';
 import {
-
+  retrieveCheckpointsFromRollup,
   retrieveL1ToL2Message,
   retrieveL1ToL2Messages,
-
-} from './data_retrieval.js';
-import {
-import { ArchiverInstrumentation } from './instrumentation.js';
+  retrievedToPublishedCheckpoint,
+} from './l1/data_retrieval.js';
+import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
 import type { InboxMessage } from './structs/inbox_message.js';
-import type
-import { type ValidateBlockResult, validateBlockAttestations } from './validation.js';
+import { type ValidateBlockResult, validateCheckpointAttestations } from './validation.js';
 
 /**
  * Helper interface to combine all sources this archiver implementation provides.
  */
 export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource;
 
+/** Request to add a block to the archiver, queued for processing by the sync loop. */
+type AddBlockRequest = {
+  block: L2BlockNew;
+  resolve: () => void;
+  reject: (err: Error) => void;
+};
+
 export type ArchiverDeps = {
   telemetry?: TelemetryClient;
-
+  blobClient: BlobClientInterface;
   epochCache?: EpochCache;
   dateProvider?: DateProvider;
 };
 
+function mapArchiverConfig(config: Partial<ArchiverConfig>) {
+  return {
+    pollingIntervalMs: config.archiverPollingIntervalMS,
+    batchSize: config.archiverBatchSize,
+    skipValidateBlockAttestations: config.skipValidateBlockAttestations,
+    maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
+    ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts,
+  };
+}
+
+type RollupStatus = {
+  provenCheckpointNumber: CheckpointNumber;
+  provenArchive: Hex;
+  pendingCheckpointNumber: CheckpointNumber;
+  pendingArchive: Hex;
+  validationResult: ValidateBlockResult | undefined;
+  lastRetrievedCheckpoint?: PublishedCheckpoint;
+  lastL1BlockWithCheckpoint?: bigint;
+};
+
 /**
- * Pulls
+ * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
  * Responsible for handling robust L1 polling so that other components do not need to
  * concern themselves with it.
  */
-export class Archiver
-
-
-
-
+export class Archiver
+  extends (EventEmitter as new () => ArchiverEmitter)
+  implements ArchiveSource, L2BlockSink, Traceable
+{
+  /** A loop in which we will be continually fetching new checkpoints. */
+  private runningPromise: RunningPromise;
 
   private rollup: RollupContract;
   private inbox: InboxContract;
@@ -120,14 +162,18 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
   private l1BlockNumber: bigint | undefined;
   private l1Timestamp: bigint | undefined;
-  private pendingChainValidationStatus: ValidateBlockResult = { valid: true };
   private initialSyncComplete: boolean = false;
+  private initialSyncPromise: PromiseWithResolvers<void>;
+
+  /** Queue of blocks to be added to the store, processed by the sync loop. */
+  private blockQueue: AddBlockRequest[] = [];
 
   public readonly tracer: Tracer;
 
   /**
    * Creates a new instance of the Archiver.
    * @param publicClient - A client for interacting with the Ethereum node.
+   * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
    * @param rollupAddress - Ethereum address of the rollup contract.
    * @param inboxAddress - Ethereum address of the inbox contract.
    * @param registryAddress - Ethereum address of the registry contract.
@@ -137,13 +183,24 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
    */
   constructor(
     private readonly publicClient: ViemPublicClient,
-    private readonly
+    private readonly debugClient: ViemPublicDebugClient,
+    private readonly l1Addresses: Pick<
+      L1ContractAddresses,
+      'rollupAddress' | 'inboxAddress' | 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
+    > & { slashingProposerAddress: EthAddress },
     readonly dataStore: ArchiverDataStore,
-    private
-
+    private config: {
+      pollingIntervalMs: number;
+      batchSize: number;
+      skipValidateBlockAttestations?: boolean;
+      maxAllowedEthClientDriftSeconds: number;
+      ethereumAllowNoDebugHosts?: boolean;
+    },
+    private readonly blobClient: BlobClientInterface,
     private readonly epochCache: EpochCache,
+    private readonly dateProvider: DateProvider,
     private readonly instrumentation: ArchiverInstrumentation,
-    private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32 },
+    private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
     private readonly log: Logger = createLogger('archiver'),
   ) {
     super();
@@ -153,6 +210,16 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
 
     this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
     this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
+    this.initialSyncPromise = promiseWithResolvers();
+
+    // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
+    // are done as fast as possible. This then gets updated once the initial sync completes.
+    this.runningPromise = new RunningPromise(
+      () => this.sync(),
+      this.log,
+      this.config.pollingIntervalMs / 10,
+      makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError),
+    );
   }
 
   /**
@@ -171,17 +238,28 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
     const publicClient = createPublicClient({
       chain: chain.chainInfo,
-      transport: fallback(config.l1RpcUrls.map(url => http(url))),
+      transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))),
       pollingInterval: config.viemPollingIntervalMS,
     });
 
+    // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
+    const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
+    const debugClient = createPublicClient({
+      chain: chain.chainInfo,
+      transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))),
+      pollingInterval: config.viemPollingIntervalMS,
+    }) as ViemPublicDebugClient;
+
     const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
 
-    const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs] =
-
-
-
-
+    const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
+      await Promise.all([
+        rollup.getL1StartBlock(),
+        rollup.getL1GenesisTime(),
+        rollup.getProofSubmissionEpochs(),
+        rollup.getGenesisArchiveTreeRoot(),
+        rollup.getSlashingProposerAddress(),
+      ] as const);
 
     const l1StartBlockHash = await publicClient
       .getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
@@ -197,23 +275,31 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
       slotDuration,
       ethereumSlotDuration,
       proofSubmissionEpochs: Number(proofSubmissionEpochs),
+      genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
     };
 
-    const opts =
-
-
-
+    const opts = merge(
+      {
+        pollingIntervalMs: 10_000,
+        batchSize: 100,
+        maxAllowedEthClientDriftSeconds: 300,
+        ethereumAllowNoDebugHosts: false,
+      },
+      mapArchiverConfig(config),
+    );
 
     const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps));
     const telemetry = deps.telemetry ?? getTelemetryClient();
 
     const archiver = new Archiver(
       publicClient,
-
+      debugClient,
+      { ...config.l1Contracts, slashingProposerAddress },
       archiverStore,
       opts,
-      deps.
+      deps.blobClient,
       epochCache,
+      deps.dateProvider ?? new DateProvider(),
       await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()),
       l1Constants,
     );
@@ -221,67 +307,109 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     return archiver;
   }
 
+  /** Updates archiver config */
+  public updateConfig(newConfig: Partial<ArchiverConfig>) {
+    this.config = merge(this.config, mapArchiverConfig(newConfig));
+  }
+
   /**
    * Starts sync process.
    * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
    */
   public async start(blockUntilSynced: boolean): Promise<void> {
-    if (this.runningPromise) {
+    if (this.runningPromise.isRunning()) {
       throw new Error('Archiver is already running');
     }
 
-    await this.
-
-
-
-
-
-
-
-
-
-
-      this.log,
-      this.config.pollingIntervalMs,
-      makeLoggingErrorHandler(
-        this.log,
-        // Ignored errors will not log to the console
-        // We ignore NoBlobBodiesFound as the message may not have been passed to the blob sink yet
-        NoBlobBodiesFoundError,
-      ),
+    await this.blobClient.testSources();
+    await this.testEthereumNodeSynced();
+    await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
+
+    // Log initial state for the archiver
+    const { l1StartBlock } = this.l1constants;
+    const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
+    const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
+    this.log.info(
+      `Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`,
+      { blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint },
     );
 
+    // Start sync loop, and return the wait for initial sync if we are asked to block until synced
     this.runningPromise.start();
+    if (blockUntilSynced) {
+      return this.waitForInitialSync();
+    }
   }
 
   public syncImmediate() {
-    if (!this.runningPromise) {
-      throw new Error('Archiver is not running');
-    }
     return this.runningPromise.trigger();
   }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  /**
+   * Queues a block to be added to the archiver store and triggers processing.
+   * The block will be processed by the sync loop.
+   * Implements the L2BlockSink interface.
+   * @param block - The L2 block to add.
+   * @returns A promise that resolves when the block has been added to the store, or rejects on error.
+   */
+  public addBlock(block: L2BlockNew): Promise<void> {
+    return new Promise<void>((resolve, reject) => {
+      this.blockQueue.push({ block, resolve, reject });
+      this.log.debug(`Queued block ${block.number} for processing`);
+      // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
+      this.syncImmediate().catch(err => {
+        this.log.error(`Sync immediate call failed: ${err}`);
+      });
+    });
   }
 
   /**
-   *
+   * Processes all queued blocks, adding them to the store.
+   * Called at the beginning of each sync iteration.
+   * Blocks are processed in the order they were queued.
    */
-
-
+  private async processQueuedBlocks(): Promise<void> {
+    if (this.blockQueue.length === 0) {
+      return;
+    }
+
+    // Take all blocks from the queue
+    const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
+    this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
+
+    // Process each block individually to properly resolve/reject each promise
+    for (const { block, resolve, reject } of queuedItems) {
+      try {
+        await this.store.addBlocks([block]);
+        this.log.debug(`Added block ${block.number} to store`);
+        resolve();
+      } catch (err: any) {
+        this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
+        reject(err);
+      }
+    }
+  }
+
+  public waitForInitialSync() {
+    return this.initialSyncPromise.promise;
+  }
+
+  /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */
+  private async testEthereumNodeSynced() {
+    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+    if (maxAllowedDelay === 0) {
+      return;
+    }
+    const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({ includeTransactions: false });
+    const currentTime = BigInt(this.dateProvider.nowInSeconds());
+    if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
+      throw new Error(
+        `Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`,
+      );
+    }
+  }
+
+  private async syncFromL1() {
     /**
      * We keep track of three "pointers" to L1 blocks:
      * 1. the last L1 block that published an L2 block
@@ -291,8 +419,6 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
     * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
     * The archiver will stay back, until there's data on L1 that will move the pointers forward.
-    *
-    * This code does not handle reorgs.
     */
     const { l1StartBlock, l1StartBlockHash } = this.l1constants;
     const {
@@ -304,13 +430,12 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     const currentL1BlockNumber = currentL1Block.number;
     const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
 
-
-
-
-
-
-
-    }
+    this.log.trace(`Starting new archiver sync iteration`, {
+      blocksSynchedTo,
+      messagesSynchedTo,
+      currentL1BlockNumber,
+      currentL1BlockHash,
+    });
 
     // ********** Ensuring Consistency of data pulled from L1 **********
 
@@ -340,37 +465,45 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
       ? (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp
       : this.l1Timestamp;
 
-    //
+    // Warn if the latest L1 block timestamp is too old
+    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+    const now = this.dateProvider.nowInSeconds();
+    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
+      this.log.warn(
+        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
+        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
+      );
+    }
+
+    // ********** Events that are processed per checkpoint **********
     if (currentL1BlockNumber > blocksSynchedTo) {
-      // First we retrieve new L2 blocks
-
+      // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
+      // pending chain validation status, proven checkpoint number, and synched L1 block number.
+      const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
       // Then we prune the current epoch if it'd reorg on next submission.
-      // Note that we don't do this before retrieving
-      //
+      // Note that we don't do this before retrieving checkpoints because we may need to retrieve
+      // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
      // the chain locally before we start unwinding stuff. This can be optimized by figuring out
-      // up to which point we're pruning, and then requesting
+      // up to which point we're pruning, and then requesting checkpoints up to that point only.
      const { rollupCanPrune } = await this.handleEpochPrune(
-        rollupStatus.
+        rollupStatus.provenCheckpointNumber,
        currentL1BlockNumber,
        currentL1Timestamp,
      );
 
-      //
-      //
-      // we
-      if (
-        rollupStatus.
-        rollupStatus.validationResult?.valid !== this.pendingChainValidationStatus.valid
-      ) {
-        this.pendingChainValidationStatus = rollupStatus.validationResult;
+      // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
+      // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
+      // we get a valid checkpoint to advance the syncpoint.
+      if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
+        await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
      }
 
-      // And lastly we check if we are missing any
+      // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
      // We only do this if rollup cant prune on the next submission. Otherwise we will end up
-      // re-syncing the
+      // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
      // since the archiver will rightfully refuse to sync up to it.
-      if (!rollupCanPrune &&
-        await this.
+      if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
+        await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
      }
 
      this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
@@ -381,17 +514,32 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     // but the corresponding blocks have not been processed (see #12631).
     this.l1Timestamp = currentL1Timestamp;
     this.l1BlockNumber = currentL1BlockNumber;
-    this.initialSyncComplete = true;
 
-
-
+    // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
+    // so if the initial sync took too long, we still go for another iteration.
+    if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= (await this.publicClient.getBlockNumber())) {
+      this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
        l1BlockNumber: currentL1BlockNumber,
        syncPoint: await this.store.getSynchPoint(),
        ...(await this.getL2Tips()),
      });
+      this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
+      this.initialSyncComplete = true;
+      this.initialSyncPromise.resolve();
    }
  }
 
+  /**
+   * Fetches logs from L1 contracts and processes them.
+   */
+  @trackSpan('Archiver.sync')
+  private async sync() {
+    // Process any queued blocks first, before doing L1 sync
+    await this.processQueuedBlocks();
+    // Now perform L1 sync
+    await this.syncFromL1();
+  }
+
  /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
  private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint) {
    const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
@@ -406,27 +554,44 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     return result;
   }
 
-  /** Checks if there'd be a reorg for the next
-  private async handleEpochPrune(
+  /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */
+  private async handleEpochPrune(
+    provenCheckpointNumber: CheckpointNumber,
+    currentL1BlockNumber: bigint,
+    currentL1Timestamp: bigint,
+  ) {
     const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
-    const
-    const canPrune =
+    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+    const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
 
     if (canPrune) {
       const timer = new Timer();
-      const pruneFrom =
+      const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);
 
-      const header = await this.
+      const header = await this.getCheckpointHeader(pruneFrom);
       if (header === undefined) {
-        throw new Error(`Missing
+        throw new Error(`Missing checkpoint header ${pruneFrom}`);
       }
 
-      const pruneFromSlotNumber = header.
-      const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
+      const pruneFromSlotNumber = header.slotNumber;
+      const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
+
+      const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
 
-      const
+      const checkpointPromises = Array.from({ length: checkpointsToUnwind })
+        .fill(0)
+        .map((_, i) => this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
+      const checkpoints = await Promise.all(checkpointPromises);
+
+      const blockPromises = await Promise.all(
+        checkpoints
+          .filter(isDefined)
+          .map(cp => this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))),
+      );
+      const newBlocks = blockPromises.filter(isDefined).flat();
 
-
+      // TODO(pw/mbps): Don't convert to legacy blocks here
+      const blocks: L2Block[] = (await Promise.all(newBlocks.map(x => this.getBlock(x.number)))).filter(isDefined);
 
       // Emit an event for listening services to react to the chain prune
       this.emit(L2BlockSourceEvents.L2PruneDetected, {
@@ -436,13 +601,13 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
       });
 
       this.log.debug(
-        `L2 prune from ${
+        `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
       );
-      await this.
+      await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
       this.log.warn(
-        `Unwound ${count(
-          `to ${
-          `Updated
+        `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
+          `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
+          `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
       );
       this.instrumentation.processPrune(timer.ms());
       // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
@@ -489,7 +654,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
       remoteMessagesState.totalMessagesInserted === localMessagesInserted &&
       remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)
     ) {
-      this.log.
+      this.log.trace(
        `No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`,
      );
      return;
@@ -543,7 +708,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     // Log stats for messages retrieved (if any).
     if (messageCount > 0) {
       this.log.info(
-        `Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for
+        `Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`,
         { lastMessage, messageCount },
       );
     }
@@ -621,202 +786,241 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
     return Buffer32.fromString(block.hash);
   }
 
-  private async
-    const
-    const
-
-
-      provenBlockNumber: Number(provenBlockNumber),
+  private async handleCheckpoints(blocksSynchedTo: bigint, currentL1BlockNumber: bigint): Promise<RollupStatus> {
+    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+    const initialValidationResult: ValidateBlockResult | undefined = await this.store.getPendingChainValidationStatus();
+    const {
+      provenCheckpointNumber,
       provenArchive,
-
+      pendingCheckpointNumber,
       pendingArchive,
-
+      archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber,
+    } = await this.rollup.status(localPendingCheckpointNumber, { blockNumber: currentL1BlockNumber });
+    const rollupStatus: RollupStatus = {
+      provenCheckpointNumber,
+      provenArchive: provenArchive.toString(),
+      pendingCheckpointNumber,
+      pendingArchive: pendingArchive.toString(),
+      validationResult: initialValidationResult,
     };
     this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
-
+      localPendingCheckpointNumber,
       blocksSynchedTo,
       currentL1BlockNumber,
-
+      archiveForLocalPendingCheckpointNumber,
       ...rollupStatus,
     });
 
-    const
-      // Annoying edge case: if proven
-      // we need to set it to zero. This is an edge case because we dont have a
-      // so
-      if (
-        const
-        if (
-          await this.
-          this.log.info(`Rolled back proven chain to
+    const updateProvenCheckpoint = async () => {
+      // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
+      // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
+      // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
+      if (provenCheckpointNumber === 0) {
+        const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+        if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+          await this.setProvenCheckpointNumber(provenCheckpointNumber);
+          this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
        }
      }
 
-      const
+      const localCheckpointForDestinationProvenCheckpointNumber =
+        await this.store.getCheckpointData(provenCheckpointNumber);
 
-      // Sanity check. I've hit what seems to be a state where the proven
-      // synched
-      const synched = await this.
-      if (
+      // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
+      // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
+      const synched = await this.getSynchedCheckpointNumber();
+      if (
+        localCheckpointForDestinationProvenCheckpointNumber &&
+        synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber
+      ) {
        this.log.error(
-          `Hit local
+          `Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`,
        );
      }
 
      this.log.trace(
-        `Local
-
+        `Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${
+          localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'
        }`,
      );
 
      if (
-
-        provenArchive
+        localCheckpointForDestinationProvenCheckpointNumber &&
+        provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)
      ) {
-        const
-        if (
-          await this.
-          this.log.info(`Updated proven chain to
-
-
-          const
-
-
+        const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+        if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+          await this.setProvenCheckpointNumber(provenCheckpointNumber);
+          this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
+          const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
+          const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
+          const lastBlockNumberInCheckpoint =
+            localCheckpointForDestinationProvenCheckpointNumber.startBlock +
+            localCheckpointForDestinationProvenCheckpointNumber.numBlocks -
+            1;
+
          this.emit(L2BlockSourceEvents.L2BlockProven, {
            type: L2BlockSourceEvents.L2BlockProven,
-            blockNumber:
+            blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
            slotNumber: provenSlotNumber,
            epochNumber: provenEpochNumber,
          });
+          this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
        } else {
-          this.log.trace(`Proven
+          this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
        }
      }
-      this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
    };
 
-    // This is an edge case that we only hit if there are no proposed
-    // If we have 0
-    const
-    if (
-      await this.store.
+    // This is an edge case that we only hit if there are no proposed checkpoints.
+    // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
+    const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
+    if (noCheckpoints) {
+      await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
      this.log.debug(
-        `No
+        `No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`,
      );
      return rollupStatus;
    }
 
-    await
+    await updateProvenCheckpoint();
 
    // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
-    // are any state that could be impacted by it. If we have no
-    if (
-      const
-      if (
-        throw new Error(`Missing
+    // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
+    if (localPendingCheckpointNumber > 0) {
+      const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
+      if (localPendingCheckpoint === undefined) {
+        throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
      }
 
-      const localPendingArchiveRoot =
-      const
-      if (
+      const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
+      const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
+      if (noCheckpointSinceLast) {
        // We believe the following line causes a problem when we encounter L1 re-orgs.
        // Basically, by setting the synched L1 block number here, we are saying that we have
-        // processed all
+        // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
        // this block again (or any blocks before).
-        // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing
+        // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
        // We must only set this block number based on actually retrieved logs.
        // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
        // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
-        this.log.debug(`No
+        this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
        return rollupStatus;
      }
 
-      const
-
-
+      const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(
+        localPendingCheckpoint.archive.root,
+      );
+      if (!localPendingCheckpointInChain) {
+        // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
        // or the L1 have reorged.
        // In any case, we have to figure out how far into the past the action will take us.
-        // For simplicity here, we will simply rewind until we end in a
+        // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
        this.log.debug(
-          `L2 prune has been detected due to local pending
-          {
+          `L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`,
+          { localPendingCheckpointNumber, localPendingArchiveRoot, archiveForLocalPendingCheckpointNumber },
        );
 
-        let tipAfterUnwind =
+        let tipAfterUnwind = localPendingCheckpointNumber;
        while (true) {
-          const
-          if (
+          const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
+          if (candidateCheckpoint === undefined) {
            break;
          }
 
-          const archiveAtContract = await this.rollup.archiveAt(
-
-
+          const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
+          this.log.trace(
+            `Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`,
+            {
+              archiveAtContract,
+              archiveLocal: candidateCheckpoint.archive.root.toString(),
+            },
+          );
+          if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
            break;
          }
          tipAfterUnwind--;
        }
 
-        const
-        await this.
+        const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
+        await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
 
        this.log.warn(
-          `Unwound ${count(
-            `due to mismatched
-            `Updated L2 latest
+          `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
+            `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` +
+            `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
        );
      }
    }
 
-    // Retrieve
+    // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
    // computed using the L2 block time vs the L1 block time.
    let searchStartBlock: bigint = blocksSynchedTo;
    let searchEndBlock: bigint = blocksSynchedTo;
-    let
+    let lastRetrievedCheckpoint: PublishedCheckpoint | undefined;
+    let lastL1BlockWithCheckpoint: bigint | undefined = undefined;
 
    do {
      [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
 
-      this.log.trace(`Retrieving
+      this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
 
-      // TODO(md): Retrieve from blob
-      const
+      // TODO(md): Retrieve from blob client then from consensus client, then from peers
+      const retrievedCheckpoints = await retrieveCheckpointsFromRollup(
        this.rollup.getContract() as GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
        this.publicClient,
-        this.
+        this.debugClient,
+        this.blobClient,
        searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
        searchEndBlock,
+        this.l1Addresses,
+        this.instrumentation,
        this.log,
+        !this.initialSyncComplete, // isHistoricalSync
      );
 
-      if (
+      if (retrievedCheckpoints.length === 0) {
        // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
        // See further details in earlier comments.
-        this.log.trace(`Retrieved no new
+        this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
        continue;
      }
 
-      const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
      this.log.debug(
-        `Retrieved ${
+        `Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`,
+        {
+          lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
+          searchStartBlock,
+          searchEndBlock,
+        },
      );
 
-      const
-      const
-
-      for (const
-        const validationResult =
-
-
-
-        if
+      const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map(b => retrievedToPublishedCheckpoint(b)));
+      const validCheckpoints: PublishedCheckpoint[] = [];
+
+      for (const published of publishedCheckpoints) {
+        const validationResult = this.config.skipValidateBlockAttestations
+          ? { valid: true as const }
+          : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
+
+        // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
+        // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
+        // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
+        // we need to update the validation result, since we need to be able to invalidate the new one.
+        // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
+        if (
+          rollupStatus.validationResult?.valid !== validationResult.valid ||
+          (!rollupStatus.validationResult.valid &&
+            !validationResult.valid &&
+            rollupStatus.validationResult.block.blockNumber === validationResult.block.blockNumber)
+        ) {
          rollupStatus.validationResult = validationResult;
        }
 
        if (!validationResult.valid) {
-          this.log.warn(`Skipping
-
-            l1BlockNumber:
+          this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
+            checkpointHash: published.checkpoint.hash(),
+            l1BlockNumber: published.l1.blockNumber,
            ...pick(validationResult, 'reason'),
          });
 
@@ -826,40 +1030,64 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
826
1030
|
validationResult,
|
|
827
1031
|
});
|
|
828
1032
|
|
|
829
|
-
// We keep consuming
|
|
830
|
-
// We just pretend the invalid ones are not there and keep consuming the next
|
|
1033
|
+
// We keep consuming checkpoints if we find an invalid one, since we do not listen for CheckpointInvalidated events
|
|
1034
|
+
// We just pretend the invalid ones are not there and keep consuming the next checkpoints
|
|
1035
|
+
// Note that this breaks if the committee ever attests to a descendant of an invalid checkpoint
|
|
831
1036
|
continue;
|
|
832
1037
|
}
|
|
833
1038
|
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
|
|
1039
|
+
// Check the inHash of the checkpoint against the l1->l2 messages.
|
|
1040
|
+
// The messages should've been synced up to the currentL1BlockNumber and must be available for the published
|
|
1041
|
+
// checkpoints we just retrieved.
|
|
1042
|
+
const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
|
|
1043
|
+
const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
|
|
1044
|
+
const publishedInHash = published.checkpoint.header.contentCommitment.inHash;
|
|
1045
|
+
if (!computedInHash.equals(publishedInHash)) {
|
|
1046
|
+
this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
|
|
1047
|
+
checkpointHash: published.checkpoint.hash(),
|
|
1048
|
+
l1BlockNumber: published.l1.blockNumber,
|
|
1049
|
+
computedInHash,
|
|
1050
|
+
publishedInHash,
|
|
1051
|
+
});
|
|
1052
|
+
// Throwing an error since this is most likely caused by a bug.
|
|
1053
|
+
throw new Error(
|
|
1054
|
+
`Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`,
|
|
1055
|
+
);
|
|
1056
|
+
}
|
|
1057
|
+
|
|
1058
|
+
validCheckpoints.push(published);
|
|
1059
|
+
this.log.debug(
|
|
1060
|
+
`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`,
|
|
1061
|
+
{
|
|
1062
|
+
checkpointHash: published.checkpoint.hash(),
|
|
1063
|
+
l1BlockNumber: published.l1.blockNumber,
|
|
1064
|
+
...published.checkpoint.header.toInspect(),
|
|
1065
|
+
blocks: published.checkpoint.blocks.map(b => b.getStats()),
|
|
1066
|
+
},
|
|
1067
|
+
);
|
|
841
1068
|
}
|
|
842
1069
|
|
|
843
1070
|
try {
|
|
844
|
-
const
|
|
1071
|
+
const updatedValidationResult =
|
|
1072
|
+
rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
|
|
1073
|
+
const [processDuration] = await elapsed(() => this.addCheckpoints(validCheckpoints, updatedValidationResult));
|
|
845
1074
|
this.instrumentation.processNewBlocks(
|
|
846
|
-
processDuration /
|
|
847
|
-
|
|
1075
|
+
processDuration / validCheckpoints.length,
|
|
1076
|
+
validCheckpoints.flatMap(c => c.checkpoint.blocks),
|
|
848
1077
|
);
|
|
849
1078
|
} catch (err) {
|
|
850
|
-
if (err instanceof
|
|
851
|
-
const {
|
|
852
|
-
const
|
|
853
|
-
? await this.store.
|
|
1079
|
+
if (err instanceof InitialCheckpointNumberNotSequentialError) {
|
|
1080
|
+
const { previousCheckpointNumber, newCheckpointNumber } = err;
|
|
1081
|
+
const previousCheckpoint = previousCheckpointNumber
|
|
1082
|
+
? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber))
|
|
854
1083
|
: undefined;
|
|
855
|
-
const updatedL1SyncPoint =
|
|
1084
|
+
const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
|
|
856
1085
|
await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
|
|
857
1086
|
this.log.warn(
|
|
858
|
-
`Attempting to insert
|
|
1087
|
+
`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`,
|
|
859
1088
|
{
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
newBlockNumber,
|
|
1089
|
+
previousCheckpointNumber,
|
|
1090
|
+
newCheckpointNumber,
|
|
863
1091
|
updatedL1SyncPoint,
|
|
864
1092
|
},
|
|
865
1093
|
);
|
|
@@ -867,56 +1095,59 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
867
1095
|
throw err;
|
|
868
1096
|
}
|
|
869
1097
|
|
|
870
|
-
for (const
|
|
871
|
-
this.log.info(`Downloaded
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
1098
|
+
for (const checkpoint of validCheckpoints) {
|
|
1099
|
+
this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
|
|
1100
|
+
checkpointHash: checkpoint.checkpoint.hash(),
|
|
1101
|
+
checkpointNumber: checkpoint.checkpoint.number,
|
|
1102
|
+
blockCount: checkpoint.checkpoint.blocks.length,
|
|
1103
|
+
txCount: checkpoint.checkpoint.blocks.reduce((acc, b) => acc + b.body.txEffects.length, 0),
|
|
1104
|
+
header: checkpoint.checkpoint.header.toInspect(),
|
|
1105
|
+
archiveRoot: checkpoint.checkpoint.archive.root.toString(),
|
|
1106
|
+
archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex,
|
|
878
1107
|
});
|
|
879
1108
|
}
|
|
880
|
-
|
|
1109
|
+
lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
|
|
1110
|
+
lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
|
|
881
1111
|
} while (searchEndBlock < currentL1BlockNumber);
|
|
882
1112
|
|
|
883
1113
|
// Important that we update AFTER inserting the blocks.
|
|
884
|
-
await
|
|
1114
|
+
await updateProvenCheckpoint();
|
|
885
1115
|
|
|
886
|
-
return { ...rollupStatus,
|
|
1116
|
+
return { ...rollupStatus, lastRetrievedCheckpoint, lastL1BlockWithCheckpoint };
|
|
887
1117
|
}
|
|
888
1118
|
|
|
889
|
-
private async
|
|
890
|
-
status:
|
|
891
|
-
lastRetrievedBlock?: PublishedL2Block;
|
|
892
|
-
pendingBlockNumber: number;
|
|
893
|
-
},
|
|
1119
|
+
private async checkForNewCheckpointsBeforeL1SyncPoint(
|
|
1120
|
+
status: RollupStatus,
|
|
894
1121
|
blocksSynchedTo: bigint,
|
|
895
1122
|
currentL1BlockNumber: bigint,
|
|
896
1123
|
) {
|
|
897
|
-
const {
|
|
898
|
-
// Compare the last
|
|
1124
|
+
const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
|
|
1125
|
+
// Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
|
|
899
1126
|
// rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
|
|
900
|
-
const
|
|
901
|
-
|
|
1127
|
+
const latestLocalCheckpointNumber =
|
|
1128
|
+
lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber());
|
|
1129
|
+
if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
|
|
902
1130
|
// Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
|
|
903
|
-
// but still
|
|
904
|
-
// We suspect an L1 reorg that added
|
|
905
|
-
// last
|
|
906
|
-
// don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
912
|
-
|
|
913
|
-
|
|
1131
|
+
// but still haven't reached the pending checkpoint according to the call to the rollup contract.
|
|
1132
|
+
// We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
|
|
1133
|
+
// the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
|
|
1134
|
+
// we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
|
|
1135
|
+
let latestLocalCheckpointArchive: string | undefined = undefined;
|
|
1136
|
+
let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
|
|
1137
|
+
if (lastRetrievedCheckpoint) {
|
|
1138
|
+
latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
|
|
1139
|
+
targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
|
|
1140
|
+
} else if (latestLocalCheckpointNumber > 0) {
|
|
1141
|
+
const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c]) => c);
|
|
1142
|
+
latestLocalCheckpointArchive = checkpoint.archive.root.toString();
|
|
1143
|
+
targetL1BlockNumber = checkpoint.l1.blockNumber;
|
|
1144
|
+
}
|
|
914
1145
|
this.log.warn(
|
|
915
|
-
`Failed to reach
|
|
1146
|
+
`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` +
|
|
916
1147
|
`Rolling back last synched L1 block number to ${targetL1BlockNumber}.`,
|
|
917
1148
|
{
|
|
918
|
-
|
|
919
|
-
|
|
1149
|
+
latestLocalCheckpointNumber,
|
|
1150
|
+
latestLocalCheckpointArchive,
|
|
920
1151
|
blocksSynchedTo,
|
|
921
1152
|
currentL1BlockNumber,
|
|
922
1153
|
...status,
|
|
@@ -924,18 +1155,15 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
924
1155
|
);
|
|
925
1156
|
await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
|
|
926
1157
|
} else {
|
|
927
|
-
this.log.trace(`No new
|
|
928
|
-
|
|
929
|
-
|
|
1158
|
+
this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
|
|
1159
|
+
latestLocalCheckpointNumber,
|
|
1160
|
+
pendingCheckpointNumber,
|
|
930
1161
|
});
|
|
931
1162
|
}
|
|
932
1163
|
}
|
|
933
1164
|
|
|
934
1165
|
/** Resumes the archiver after a stop. */
|
|
935
1166
|
public resume() {
|
|
936
|
-
if (!this.runningPromise) {
|
|
937
|
-
throw new Error(`Archiver was never started`);
|
|
938
|
-
}
|
|
939
1167
|
if (this.runningPromise.isRunning()) {
|
|
940
1168
|
this.log.warn(`Archiver already running`);
|
|
941
1169
|
}
|
|
@@ -949,7 +1177,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
949
1177
|
*/
|
|
950
1178
|
public async stop(): Promise<void> {
|
|
951
1179
|
this.log.debug('Stopping...');
|
|
952
|
-
await this.runningPromise
|
|
1180
|
+
await this.runningPromise.stop();
|
|
953
1181
|
|
|
954
1182
|
this.log.info('Stopped.');
|
|
955
1183
|
return Promise.resolve();
|
|
@@ -963,6 +1191,10 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
963
1191
|
return Promise.resolve(this.l1constants);
|
|
964
1192
|
}
|
|
965
1193
|
|
|
1194
|
+
public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> {
|
|
1195
|
+
return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot });
|
|
1196
|
+
}
|
|
1197
|
+
|
|
966
1198
|
public getRollupAddress(): Promise<EthAddress> {
|
|
967
1199
|
return Promise.resolve(this.l1Addresses.rollupAddress);
|
|
968
1200
|
}
|
|
@@ -971,70 +1203,79 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
971
1203
|
return Promise.resolve(this.l1Addresses.registryAddress);
|
|
972
1204
|
}
|
|
973
1205
|
|
|
974
|
-
public getL1BlockNumber(): bigint {
|
|
975
|
-
|
|
976
|
-
if (!l1BlockNumber) {
|
|
977
|
-
throw new Error('L1 block number not yet available. Complete an initial sync first.');
|
|
978
|
-
}
|
|
979
|
-
return l1BlockNumber;
|
|
1206
|
+
public getL1BlockNumber(): bigint | undefined {
|
|
1207
|
+
return this.l1BlockNumber;
|
|
980
1208
|
}
|
|
981
1209
|
|
|
982
|
-
public getL1Timestamp(): Promise<bigint> {
|
|
983
|
-
|
|
984
|
-
if (!l1Timestamp) {
|
|
985
|
-
throw new Error('L1 timestamp not yet available. Complete an initial sync first.');
|
|
986
|
-
}
|
|
987
|
-
return Promise.resolve(l1Timestamp);
|
|
1210
|
+
public getL1Timestamp(): Promise<bigint | undefined> {
|
|
1211
|
+
return Promise.resolve(this.l1Timestamp);
|
|
988
1212
|
}
|
|
989
1213
|
|
|
990
|
-
public
|
|
991
|
-
return
|
|
1214
|
+
public getL2SlotNumber(): Promise<SlotNumber | undefined> {
|
|
1215
|
+
return Promise.resolve(
|
|
1216
|
+
this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants),
|
|
1217
|
+
);
|
|
992
1218
|
}
|
|
993
1219
|
|
|
994
|
-
public
|
|
995
|
-
return
|
|
1220
|
+
public getL2EpochNumber(): Promise<EpochNumber | undefined> {
|
|
1221
|
+
return Promise.resolve(
|
|
1222
|
+
this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants),
|
|
1223
|
+
);
|
|
996
1224
|
}
|
|
997
1225
|
|
|
998
|
-
public async getBlocksForEpoch(epochNumber:
|
|
1226
|
+
public async getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
|
|
999
1227
|
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1000
1228
|
const blocks: L2Block[] = [];
|
|
1001
1229
|
|
|
1002
|
-
// Walk the list of
|
|
1003
|
-
// We'll typically ask for
|
|
1004
|
-
let
|
|
1005
|
-
const slot = (b:
|
|
1006
|
-
while (
|
|
1007
|
-
if (slot(
|
|
1008
|
-
blocks
|
|
1230
|
+
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1231
|
+
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1232
|
+
let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1233
|
+
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1234
|
+
while (checkpoint && slot(checkpoint) >= start) {
|
|
1235
|
+
if (slot(checkpoint) <= end) {
|
|
1236
|
+
// push the blocks on backwards
|
|
1237
|
+
const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
|
|
1238
|
+
for (let i = endBlock; i >= checkpoint.startBlock; i--) {
|
|
1239
|
+
const block = await this.getBlock(BlockNumber(i));
|
|
1240
|
+
if (block) {
|
|
1241
|
+
blocks.push(block);
|
|
1242
|
+
}
|
|
1243
|
+
}
|
|
1009
1244
|
}
|
|
1010
|
-
|
|
1245
|
+
checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
|
|
1011
1246
|
}
|
|
1012
1247
|
|
|
1013
1248
|
return blocks.reverse();
|
|
1014
1249
|
}
|
|
1015
1250
|
|
|
1016
|
-
public async getBlockHeadersForEpoch(epochNumber:
|
|
1251
|
+
public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
|
|
1017
1252
|
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1018
1253
|
const blocks: BlockHeader[] = [];
|
|
1019
1254
|
|
|
1020
|
-
// Walk the list of
|
|
1021
|
-
// We'll typically ask for
|
|
1022
|
-
let
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1255
|
+
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1256
|
+
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1257
|
+
let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1258
|
+
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1259
|
+
while (checkpoint && slot(checkpoint) >= start) {
|
|
1260
|
+
if (slot(checkpoint) <= end) {
|
|
1261
|
+
// push the blocks on backwards
|
|
1262
|
+
const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
|
|
1263
|
+
for (let i = endBlock; i >= checkpoint.startBlock; i--) {
|
|
1264
|
+
const block = await this.getBlockHeader(BlockNumber(i));
|
|
1265
|
+
if (block) {
|
|
1266
|
+
blocks.push(block);
|
|
1267
|
+
}
|
|
1268
|
+
}
|
|
1028
1269
|
}
|
|
1029
|
-
|
|
1270
|
+
checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
|
|
1030
1271
|
}
|
|
1031
1272
|
return blocks.reverse();
|
|
1032
1273
|
}
|
|
1033
1274
|
|
|
1034
|
-
public async isEpochComplete(epochNumber:
|
|
1275
|
+
public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
|
|
1035
1276
|
// The epoch is complete if the current L2 block is the last one in the epoch (or later)
|
|
1036
1277
|
const header = await this.getBlockHeader('latest');
|
|
1037
|
-
const slot = header
|
|
1278
|
+
const slot = header ? header.globalVariables.slotNumber : undefined;
|
|
1038
1279
|
const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1039
1280
|
if (slot && slot >= endSlot) {
|
|
1040
1281
|
return true;
|
|
@@ -1064,23 +1305,61 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1064
1305
|
return this.initialSyncComplete;
|
|
1065
1306
|
}
|
|
1066
1307
|
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
1308
|
+
public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise<CheckpointHeader | undefined> {
|
|
1309
|
+
if (number === 'latest') {
|
|
1310
|
+
number = await this.getSynchedCheckpointNumber();
|
|
1311
|
+
}
|
|
1312
|
+
if (number === 0) {
|
|
1313
|
+
return undefined;
|
|
1314
|
+
}
|
|
1315
|
+
const checkpoint = await this.store.getCheckpointData(number);
|
|
1316
|
+
if (!checkpoint) {
|
|
1317
|
+
return undefined;
|
|
1318
|
+
}
|
|
1319
|
+
return checkpoint.header;
|
|
1320
|
+
}
|
|
1321
|
+
|
|
1322
|
+
public getCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1323
|
+
return this.getSynchedCheckpointNumber();
|
|
1324
|
+
}
|
|
1325
|
+
|
|
1326
|
+
public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1327
|
+
return this.store.getSynchedCheckpointNumber();
|
|
1328
|
+
}
|
|
1329
|
+
|
|
1330
|
+
public getProvenCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1331
|
+
return this.store.getProvenCheckpointNumber();
|
|
1076
1332
|
}
|
|
1077
1333
|
|
|
1078
|
-
|
|
1079
|
-
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
return
|
|
1334
|
+
public setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
|
|
1335
|
+
return this.store.setProvenCheckpointNumber(checkpointNumber);
|
|
1336
|
+
}
|
|
1337
|
+
|
|
1338
|
+
public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
|
|
1339
|
+
return this.store.unwindCheckpoints(from, checkpointsToUnwind);
|
|
1340
|
+
}
|
|
1341
|
+
|
|
1342
|
+
public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise<BlockNumber | undefined> {
|
|
1343
|
+
const checkpointData = await this.store.getCheckpointData(checkpointNumber);
|
|
1344
|
+
if (!checkpointData) {
|
|
1345
|
+
return undefined;
|
|
1346
|
+
}
|
|
1347
|
+
return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
|
|
1348
|
+
}
|
|
1349
|
+
|
|
1350
|
+
public addCheckpoints(
|
|
1351
|
+
checkpoints: PublishedCheckpoint[],
|
|
1352
|
+
pendingChainValidationStatus?: ValidateBlockResult,
|
|
1353
|
+
): Promise<boolean> {
|
|
1354
|
+
return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
|
|
1355
|
+
}
|
|
1356
|
+
|
|
1357
|
+
public getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
|
|
1358
|
+
return this.store.getBlockHeaderByHash(blockHash);
|
|
1359
|
+
}
|
|
1360
|
+
|
|
1361
|
+
public getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
|
|
1362
|
+
return this.store.getBlockHeaderByArchive(archive);
|
|
1084
1363
|
}
|
|
1085
1364
|
|
|
1086
1365
|
/**
|
|
@@ -1088,7 +1367,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1088
1367
|
* @param number - The block number to return.
|
|
1089
1368
|
* @returns The requested L2 block.
|
|
1090
1369
|
*/
|
|
1091
|
-
public async
|
|
1370
|
+
public async getL2BlockNew(number: BlockNumber): Promise<L2BlockNew | undefined> {
|
|
1092
1371
|
// If the number provided is -ve, then return the latest block.
|
|
1093
1372
|
if (number < 0) {
|
|
1094
1373
|
number = await this.store.getSynchedL2BlockNumber();
|
|
@@ -1096,11 +1375,11 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1096
1375
|
if (number === 0) {
|
|
1097
1376
|
return undefined;
|
|
1098
1377
|
}
|
|
1099
|
-
const publishedBlock = await this.store.
|
|
1100
|
-
return publishedBlock
|
|
1378
|
+
const publishedBlock = await this.store.store.getBlock(number);
|
|
1379
|
+
return publishedBlock;
|
|
1101
1380
|
}
|
|
1102
1381
|
|
|
1103
|
-
public async getBlockHeader(number:
|
|
1382
|
+
public async getBlockHeader(number: BlockNumber | 'latest'): Promise<BlockHeader | undefined> {
|
|
1104
1383
|
if (number === 'latest') {
|
|
1105
1384
|
number = await this.store.getSynchedL2BlockNumber();
|
|
1106
1385
|
}
|
|
@@ -1111,6 +1390,21 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1111
1390
|
return headers.length === 0 ? undefined : headers[0];
|
|
1112
1391
|
}
|
|
1113
1392
|
|
|
1393
|
+
getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
|
|
1394
|
+
return this.store.getCheckpointedBlock(number);
|
|
1395
|
+
}
|
|
1396
|
+
|
|
1397
|
+
getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
1398
|
+
return this.store.getCheckpointedBlockByHash(blockHash);
|
|
1399
|
+
}
|
|
1400
|
+
|
|
1401
|
+
getProvenBlockNumber(): Promise<BlockNumber> {
|
|
1402
|
+
return this.store.getProvenBlockNumber();
|
|
1403
|
+
}
|
|
1404
|
+
getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
1405
|
+
return this.store.getCheckpointedBlockByArchive(archive);
|
|
1406
|
+
}
|
|
1407
|
+
|
|
1114
1408
|
public getTxEffect(txHash: TxHash) {
|
|
1115
1409
|
return this.store.getTxEffect(txHash);
|
|
1116
1410
|
}
|
|
@@ -1119,24 +1413,12 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1119
1413
|
return this.store.getSettledTxReceipt(txHash);
|
|
1120
1414
|
}
|
|
1121
1415
|
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
* @param from - The block number from which to begin retrieving logs.
|
|
1125
|
-
* @param limit - The maximum number of blocks to retrieve logs from.
|
|
1126
|
-
* @returns An array of private logs from the specified range of blocks.
|
|
1127
|
-
*/
|
|
1128
|
-
public getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]> {
|
|
1129
|
-
return this.store.getPrivateLogs(from, limit);
|
|
1416
|
+
getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
1417
|
+
return this.store.getPrivateLogsByTags(tags);
|
|
1130
1418
|
}
|
|
1131
1419
|
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
* @param tags - The tags to filter the logs by.
|
|
1135
|
-
* @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
|
|
1136
|
-
* that tag.
|
|
1137
|
-
*/
|
|
1138
|
-
getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
|
|
1139
|
-
return this.store.getLogsByTags(tags);
|
|
1420
|
+
getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
|
|
1421
|
+
return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
1140
1422
|
}
|
|
1141
1423
|
|
|
1142
1424
|
/**
|
|
@@ -1159,19 +1441,11 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1159
1441
|
|
|
1160
1442
|
/**
|
|
1161
1443
|
* Gets the number of the latest L2 block processed by the block source implementation.
|
|
1444
|
+
* This includes both checkpointed and uncheckpointed blocks.
|
|
1162
1445
|
* @returns The number of the latest L2 block processed by the block source implementation.
|
|
1163
1446
|
*/
|
|
1164
|
-
public getBlockNumber(): Promise<
|
|
1165
|
-
return this.store.
|
|
1166
|
-
}
|
|
1167
|
-
|
|
1168
|
-
public getProvenBlockNumber(): Promise<number> {
|
|
1169
|
-
return this.store.getProvenL2BlockNumber();
|
|
1170
|
-
}
|
|
1171
|
-
|
|
1172
|
-
/** Forcefully updates the last proven block number. Use for testing. */
|
|
1173
|
-
public setProvenBlockNumber(blockNumber: number): Promise<void> {
|
|
1174
|
-
return this.store.setProvenL2BlockNumber(blockNumber);
|
|
1447
|
+
public getBlockNumber(): Promise<BlockNumber> {
|
|
1448
|
+
return this.store.getLatestBlockNumber();
|
|
1175
1449
|
}
|
|
1176
1450
|
|
|
1177
1451
|
public getContractClass(id: Fr): Promise<ContractClassPublic | undefined> {
|
|
@@ -1199,12 +1473,12 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1199
1473
|
}
|
|
1200
1474
|
|
|
1201
1475
|
/**
|
|
1202
|
-
* Gets L1 to L2 message (to be) included in a given
|
|
1203
|
-
* @param
|
|
1476
|
+
* Gets L1 to L2 message (to be) included in a given checkpoint.
|
|
1477
|
+
* @param checkpointNumber - Checkpoint number to get messages for.
|
|
1204
1478
|
* @returns The L1 to L2 messages/leaves of the messages subtree (throws if not found).
|
|
1205
1479
|
*/
|
|
1206
|
-
getL1ToL2Messages(
|
|
1207
|
-
return this.store.getL1ToL2Messages(
|
|
1480
|
+
getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
|
|
1481
|
+
return this.store.getL1ToL2Messages(checkpointNumber);
|
|
1208
1482
|
}
|
|
1209
1483
|
|
|
1210
1484
|
/**
|
|
@@ -1228,12 +1502,12 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1228
1502
|
return this.store.getDebugFunctionName(address, selector);
|
|
1229
1503
|
}
|
|
1230
1504
|
|
|
1231
|
-
getPendingChainValidationStatus(): Promise<ValidateBlockResult> {
|
|
1232
|
-
return
|
|
1505
|
+
async getPendingChainValidationStatus(): Promise<ValidateBlockResult> {
|
|
1506
|
+
return (await this.store.getPendingChainValidationStatus()) ?? { valid: true };
|
|
1233
1507
|
}
|
|
1234
1508
|
|
|
1235
1509
|
isPendingChainInvalid(): Promise<boolean> {
|
|
1236
|
-
return
|
|
1510
|
+
return this.getPendingChainValidationStatus().then(status => !status.valid);
|
|
1237
1511
|
}
|
|
1238
1512
|
|
|
1239
1513
|
async getL2Tips(): Promise<L2Tips> {
|
|
@@ -1245,7 +1519,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1245
1519
|
// TODO(#13569): Compute proper finalized block number based on L1 finalized block.
|
|
1246
1520
|
// We just force it 2 epochs worth of proven data for now.
|
|
1247
1521
|
// NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
|
|
1248
|
-
const finalizedBlockNumber = Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0);
|
|
1522
|
+
const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
|
|
1249
1523
|
|
|
1250
1524
|
const [latestBlockHeader, provenBlockHeader, finalizedBlockHeader] = await Promise.all([
|
|
1251
1525
|
latestBlockNumber > 0 ? this.getBlockHeader(latestBlockNumber) : undefined,
|
|
@@ -1269,52 +1543,44 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1269
1543
|
);
|
|
1270
1544
|
}
|
|
1271
1545
|
|
|
1272
|
-
const latestBlockHeaderHash = await latestBlockHeader?.hash();
|
|
1273
|
-
const provenBlockHeaderHash = await provenBlockHeader?.hash();
|
|
1274
|
-
const finalizedBlockHeaderHash = await finalizedBlockHeader?.hash();
|
|
1546
|
+
const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1547
|
+
const provenBlockHeaderHash = (await provenBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1548
|
+
const finalizedBlockHeaderHash = (await finalizedBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1275
1549
|
|
|
1276
1550
|
return {
|
|
1277
|
-
latest: {
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
} as L2BlockId,
|
|
1281
|
-
proven: {
|
|
1282
|
-
number: provenBlockNumber,
|
|
1283
|
-
hash: provenBlockHeaderHash?.toString(),
|
|
1284
|
-
} as L2BlockId,
|
|
1285
|
-
finalized: {
|
|
1286
|
-
number: finalizedBlockNumber,
|
|
1287
|
-
hash: finalizedBlockHeaderHash?.toString(),
|
|
1288
|
-
} as L2BlockId,
|
|
1551
|
+
latest: { number: latestBlockNumber, hash: latestBlockHeaderHash.toString() },
|
|
1552
|
+
proven: { number: provenBlockNumber, hash: provenBlockHeaderHash.toString() },
|
|
1553
|
+
finalized: { number: finalizedBlockNumber, hash: finalizedBlockHeaderHash.toString() },
|
|
1289
1554
|
};
|
|
1290
1555
|
}
|
|
1291
1556
|
|
|
1292
|
-
public async rollbackTo(targetL2BlockNumber:
|
|
1557
|
+
public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
|
|
1558
|
+
// TODO(pw/mbps): This still assumes 1 block per checkpoint
|
|
1293
1559
|
const currentBlocks = await this.getL2Tips();
|
|
1294
1560
|
const currentL2Block = currentBlocks.latest.number;
|
|
1295
1561
|
const currentProvenBlock = currentBlocks.proven.number;
|
|
1296
|
-
// const currentFinalizedBlock = currentBlocks.finalized.number;
|
|
1297
1562
|
|
|
1298
1563
|
if (targetL2BlockNumber >= currentL2Block) {
|
|
1299
1564
|
throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
|
|
1300
1565
|
}
|
|
1301
1566
|
const blocksToUnwind = currentL2Block - targetL2BlockNumber;
|
|
1302
|
-
const targetL2Block = await this.store.
|
|
1567
|
+
const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
|
|
1303
1568
|
if (!targetL2Block) {
|
|
1304
1569
|
throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
|
|
1305
1570
|
}
|
|
1306
1571
|
const targetL1BlockNumber = targetL2Block.l1.blockNumber;
|
|
1572
|
+
const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
|
|
1307
1573
|
const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
|
|
1308
|
-
this.log.info(`Unwinding ${blocksToUnwind}
|
|
1309
|
-
await this.store.
|
|
1310
|
-
this.log.info(`Unwinding L1 to L2 messages to ${
|
|
1311
|
-
await this.store.
|
|
1574
|
+
this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
|
|
1575
|
+
await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
|
|
1576
|
+
this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
|
|
1577
|
+
await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
|
|
1312
1578
|
this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
|
|
1313
1579
|
await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
|
|
1314
1580
|
await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
|
|
1315
1581
|
if (targetL2BlockNumber < currentProvenBlock) {
|
|
1316
1582
|
this.log.info(`Clearing proven L2 block number`);
|
|
1317
|
-
await this.store.
|
|
1583
|
+
await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
|
|
1318
1584
|
}
|
|
1319
1585
|
// TODO(palla/reorg): Set the finalized block when we add support for it.
|
|
1320
1586
|
// if (targetL2BlockNumber < currentFinalizedBlock) {
|
|
@@ -1322,6 +1588,150 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
|
|
|
1322
1588
|
// await this.store.setFinalizedL2BlockNumber(0);
|
|
1323
1589
|
// }
|
|
1324
1590
|
}
|
|
1591
|
+
|
|
1592
|
+
public async getPublishedCheckpoints(
|
|
1593
|
+
checkpointNumber: CheckpointNumber,
|
|
1594
|
+
limit: number,
|
|
1595
|
+
): Promise<PublishedCheckpoint[]> {
|
|
1596
|
+
const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
|
|
1597
|
+
const blocks = (
|
|
1598
|
+
await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
|
|
1599
|
+
).filter(isDefined);
|
|
1600
|
+
|
|
1601
|
+
const fullCheckpoints: PublishedCheckpoint[] = [];
|
|
1602
|
+
for (let i = 0; i < checkpoints.length; i++) {
|
|
1603
|
+
const blocksForCheckpoint = blocks[i];
|
|
1604
|
+
const checkpoint = checkpoints[i];
|
|
1605
|
+
const fullCheckpoint = new Checkpoint(
|
|
1606
|
+
checkpoint.archive,
|
|
1607
|
+
checkpoint.header,
|
|
1608
|
+
blocksForCheckpoint,
|
|
1609
|
+
checkpoint.checkpointNumber,
|
|
1610
|
+
);
|
|
1611
|
+
const publishedCheckpoint = new PublishedCheckpoint(
|
|
1612
|
+
fullCheckpoint,
|
|
1613
|
+
checkpoint.l1,
|
|
1614
|
+
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1615
|
+
);
|
|
1616
|
+
fullCheckpoints.push(publishedCheckpoint);
|
|
1617
|
+
}
|
|
1618
|
+
return fullCheckpoints;
|
|
1619
|
+
}
|
|
1620
|
+
|
|
1621
|
+
public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise<Checkpoint[]> {
|
|
1622
|
+
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1623
|
+
const checkpoints: Checkpoint[] = [];
|
|
1624
|
+
|
|
1625
|
+
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1626
|
+
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1627
|
+
let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1628
|
+
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1629
|
+
while (checkpointData && slot(checkpointData) >= start) {
|
|
1630
|
+
if (slot(checkpointData) <= end) {
|
|
1631
|
+
// push the checkpoints on backwards
|
|
1632
|
+
const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
|
|
1633
|
+
checkpoints.push(checkpoint.checkpoint);
|
|
1634
|
+
}
|
|
1635
|
+
checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
|
|
1636
|
+
}
|
|
1637
|
+
|
|
1638
|
+
return checkpoints.reverse();
|
|
1639
|
+
}
|
|
1640
|
+
|
|
1641
|
+
/* Legacy APIs */
|
|
1642
|
+
|
|
1643
|
+
public async getPublishedBlockByHash(blockHash: Fr): Promise<PublishedL2Block | undefined> {
|
|
1644
|
+
const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
|
|
1645
|
+
return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
|
|
1646
|
+
}
|
|
1647
|
+
public async getPublishedBlockByArchive(archive: Fr): Promise<PublishedL2Block | undefined> {
|
|
1648
|
+
const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
|
|
1649
|
+
return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
|
|
1650
|
+
}
|
|
1651
|
+
|
|
1652
|
+
/**
|
|
1653
|
+
* Gets up to `limit` amount of L2 blocks starting from `from`.
|
|
1654
|
+
* @param from - Number of the first block to return (inclusive).
|
|
1655
|
+
* @param limit - The number of blocks to return.
|
|
1656
|
+
* @param proven - If true, only return blocks that have been proven.
|
|
1657
|
+
* @returns The requested L2 blocks.
|
|
1658
|
+
*/
|
|
1659
|
+
public async getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<L2Block[]> {
|
|
1660
|
+
const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
|
|
1661
|
+
return publishedBlocks.map(x => x.block);
|
|
1662
|
+
}
|
|
1663
|
+
|
|
1664
|
+
public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<PublishedL2Block[]> {
|
|
1665
|
+
const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
|
|
1666
|
+
const provenCheckpointNumber = await this.getProvenCheckpointNumber();
|
|
1667
|
+
const blocks = (
|
|
1668
|
+
await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
|
|
1669
|
+
).filter(isDefined);
|
|
1670
|
+
|
|
1671
|
+
const olbBlocks: PublishedL2Block[] = [];
|
|
1672
|
+
for (let i = 0; i < checkpoints.length; i++) {
|
|
1673
|
+
const blockForCheckpoint = blocks[i][0];
|
|
1674
|
+
const checkpoint = checkpoints[i];
|
|
1675
|
+
if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
|
|
1676
|
+
// this checkpointisn't proven and we only want proven
|
|
1677
|
+
continue;
|
|
1678
|
+
}
|
|
1679
|
+
const oldCheckpoint = new Checkpoint(
|
|
1680
|
+
blockForCheckpoint.archive,
|
|
1681
|
+
checkpoint.header,
|
|
1682
|
+
[blockForCheckpoint],
|
|
1683
|
+
checkpoint.checkpointNumber,
|
|
1684
|
+
);
|
|
1685
|
+
const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
|
|
1686
|
+
const publishedBlock = new PublishedL2Block(
|
|
1687
|
+
oldBlock,
|
|
1688
|
+
checkpoint.l1,
|
|
1689
|
+
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1690
|
+
);
|
|
1691
|
+
olbBlocks.push(publishedBlock);
|
|
1692
|
+
}
|
|
1693
|
+
return olbBlocks;
|
|
1694
|
+
}
|
|
1695
|
+
|
|
1696
|
+
private async buildOldBlockFromCheckpointedBlock(
|
|
1697
|
+
checkpointedBlock: CheckpointedL2Block | undefined,
|
|
1698
|
+
): Promise<PublishedL2Block | undefined> {
|
|
1699
|
+
if (!checkpointedBlock) {
|
|
1700
|
+
return undefined;
|
|
1701
|
+
}
|
|
1702
|
+
const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
|
|
1703
|
+
if (!checkpoint) {
|
|
1704
|
+
return checkpoint;
|
|
1705
|
+
}
|
|
1706
|
+
const fullCheckpoint = new Checkpoint(
|
|
1707
|
+
checkpointedBlock?.block.archive,
|
|
1708
|
+
checkpoint?.header,
|
|
1709
|
+
[checkpointedBlock.block],
|
|
1710
|
+
checkpoint.checkpointNumber,
|
|
1711
|
+
);
|
|
1712
|
+
const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
|
|
1713
|
+
const published = new PublishedL2Block(
|
|
1714
|
+
oldBlock,
|
|
1715
|
+
checkpoint.l1,
|
|
1716
|
+
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1717
|
+
);
|
|
1718
|
+
return published;
|
|
1719
|
+
}
|
|
1720
|
+
|
|
1721
|
+
public async getBlock(number: BlockNumber): Promise<L2Block | undefined> {
|
|
1722
|
+
// If the number provided is -ve, then return the latest block.
|
|
1723
|
+
if (number < 0) {
|
|
1724
|
+
number = await this.store.getSynchedL2BlockNumber();
|
|
1725
|
+
}
|
|
1726
|
+
if (number === 0) {
|
|
1727
|
+
return undefined;
|
|
1728
|
+
}
|
|
1729
|
+
const publishedBlocks = await this.getPublishedBlocks(number, 1);
|
|
1730
|
+
if (publishedBlocks.length === 0) {
|
|
1731
|
+
return undefined;
|
|
1732
|
+
}
|
|
1733
|
+
return publishedBlocks[0].block;
|
|
1734
|
+
}
|
|
1325
1735
|
}
|
|
1326
1736
|
|
|
1327
1737
|
enum Operation {
|
|
@@ -1351,17 +1761,20 @@ export class ArchiverStoreHelper
|
|
|
1351
1761
|
| 'backupTo'
|
|
1352
1762
|
| 'close'
|
|
1353
1763
|
| 'transactionAsync'
|
|
1764
|
+
| 'addBlocks'
|
|
1765
|
+
| 'getBlock'
|
|
1766
|
+
| 'getBlocks'
|
|
1354
1767
|
>
|
|
1355
1768
|
{
|
|
1356
1769
|
#log = createLogger('archiver:block-helper');
|
|
1357
1770
|
|
|
1358
|
-
constructor(
|
|
1771
|
+
constructor(public readonly store: ArchiverDataStore) {}
|
|
1359
1772
|
|
|
1360
1773
|
/**
|
|
1361
1774
|
* Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract.
|
|
1362
1775
|
* @param allLogs - All logs emitted in a bunch of blocks.
|
|
1363
1776
|
*/
|
|
1364
|
-
async #updatePublishedContractClasses(allLogs: ContractClassLog[], blockNum:
|
|
1777
|
+
async #updatePublishedContractClasses(allLogs: ContractClassLog[], blockNum: BlockNumber, operation: Operation) {
|
|
1365
1778
|
const contractClassPublishedEvents = allLogs
|
|
1366
1779
|
.filter(log => ContractClassPublishedEvent.isContractClassPublishedEvent(log))
|
|
1367
1780
|
.map(log => ContractClassPublishedEvent.fromLog(log));
|
|
@@ -1386,7 +1799,7 @@ export class ArchiverStoreHelper
|
|
|
1386
1799
|
* Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
|
|
1387
1800
|
* @param allLogs - All logs emitted in a bunch of blocks.
|
|
1388
1801
|
*/
|
|
1389
|
-
async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum:
|
|
1802
|
+
async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum: BlockNumber, operation: Operation) {
|
|
1390
1803
|
const contractInstances = allLogs
|
|
1391
1804
|
.filter(log => ContractInstancePublishedEvent.isContractInstancePublishedEvent(log))
|
|
1392
1805
|
.map(log => ContractInstancePublishedEvent.fromLog(log))
|
|
@@ -1439,7 +1852,7 @@ export class ArchiverStoreHelper
|
|
|
1439
1852
|
* @param _blockNum - The block number
|
|
1440
1853
|
* @returns
|
|
1441
1854
|
*/
|
|
1442
|
-
async #storeBroadcastedIndividualFunctions(allLogs: ContractClassLog[], _blockNum:
|
|
1855
|
+
async #storeBroadcastedIndividualFunctions(allLogs: ContractClassLog[], _blockNum: BlockNumber) {
|
|
1443
1856
|
// Filter out private and utility function broadcast events
|
|
1444
1857
|
const privateFnEvents = allLogs
|
|
1445
1858
|
.filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log))
|
|
@@ -1493,32 +1906,36 @@ export class ArchiverStoreHelper
|
|
|
1493
1906
|
return true;
|
|
1494
1907
|
}
|
|
1495
1908
|
|
|
1496
|
-
|
|
1909
|
+
private async addBlockDataToDB(block: L2BlockNew) {
|
|
1910
|
+
const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
|
|
1911
|
+
// ContractInstancePublished event logs are broadcast in privateLogs.
|
|
1912
|
+
const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
|
|
1913
|
+
const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
|
|
1914
|
+
|
|
1915
|
+
return (
|
|
1916
|
+
await Promise.all([
|
|
1917
|
+
this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Store),
|
|
1918
|
+
this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Store),
|
|
1919
|
+
this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Store),
|
|
1920
|
+
this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number),
|
|
1921
|
+
])
|
|
1922
|
+
).every(Boolean);
|
|
1923
|
+
}
|
|
1924
|
+
|
|
1925
|
+
public addBlocks(blocks: L2BlockNew[], pendingChainValidationStatus?: ValidateBlockResult): Promise<boolean> {
|
|
1497
1926
|
// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
|
|
1498
1927
|
// or if the previous block is not in the store.
|
|
1499
1928
|
return this.store.transactionAsync(async () => {
|
|
1500
1929
|
await this.store.addBlocks(blocks);
|
|
1501
1930
|
|
|
1502
1931
|
const opResults = await Promise.all([
|
|
1503
|
-
|
|
1932
|
+
// Update the pending chain validation status if provided
|
|
1933
|
+
pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
|
|
1934
|
+
// Add any logs emitted during the retrieved blocks
|
|
1935
|
+
this.store.addLogs(blocks),
|
|
1504
1936
|
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
1505
|
-
...blocks.map(
|
|
1506
|
-
|
|
1507
|
-
// ContractInstancePublished event logs are broadcast in privateLogs.
|
|
1508
|
-
const privateLogs = block.block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
|
|
1509
|
-
const publicLogs = block.block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
|
|
1510
|
-
return (
|
|
1511
|
-
await Promise.all([
|
|
1512
|
-
this.#updatePublishedContractClasses(contractClassLogs, block.block.number, Operation.Store),
|
|
1513
|
-
this.#updateDeployedContractInstances(privateLogs, block.block.number, Operation.Store),
|
|
1514
|
-
this.#updateUpdatedContractInstances(
|
|
1515
|
-
publicLogs,
|
|
1516
|
-
block.block.header.globalVariables.timestamp,
|
|
1517
|
-
Operation.Store,
|
|
1518
|
-
),
|
|
1519
|
-
this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.block.number),
|
|
1520
|
-
])
|
|
1521
|
-
).every(Boolean);
|
|
1937
|
+
...blocks.map(block => {
|
|
1938
|
+
return this.addBlockDataToDB(block);
|
|
1522
1939
|
}),
|
|
1523
1940
|
]);
|
|
1524
1941
|
|
|
@@ -1526,55 +1943,124 @@ export class ArchiverStoreHelper
|
|
|
1526
1943
|
});
|
|
1527
1944
|
}
|
|
1528
1945
|
|
|
1529
|
-
public
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1946
|
+
public addCheckpoints(
|
|
1947
|
+
checkpoints: PublishedCheckpoint[],
|
|
1948
|
+
pendingChainValidationStatus?: ValidateBlockResult,
|
|
1949
|
+
): Promise<boolean> {
|
|
1950
|
+
// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
|
|
1951
|
+
// or if the previous block is not in the store.
|
|
1952
|
+
return this.store.transactionAsync(async () => {
|
|
1953
|
+
await this.store.addCheckpoints(checkpoints);
|
|
1954
|
+
const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks);
|
|
1955
|
+
|
|
1956
|
+
const opResults = await Promise.all([
|
|
1957
|
+
// Update the pending chain validation status if provided
|
|
1958
|
+
pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
|
|
1959
|
+
// Add any logs emitted during the retrieved blocks
|
|
1960
|
+
this.store.addLogs(allBlocks),
|
|
1961
|
+
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
1962
|
+
...allBlocks.map(block => {
|
|
1963
|
+
return this.addBlockDataToDB(block);
|
|
1964
|
+
}),
|
|
1965
|
+
]);
|
|
1966
|
+
|
|
1967
|
+
return opResults.every(Boolean);
|
|
1968
|
+
});
|
|
1969
|
+
}
|
|
1970
|
+
|
|
1971
|
+
public async unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
|
|
1972
|
+
if (checkpointsToUnwind <= 0) {
|
|
1973
|
+
throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
|
|
1533
1974
|
}
|
|
1534
|
-
|
|
1535
|
-
|
|
1975
|
+
|
|
1976
|
+
const last = await this.getSynchedCheckpointNumber();
|
|
1977
|
+
if (from != last) {
|
|
1978
|
+
throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
|
|
1536
1979
|
}
|
|
1537
1980
|
|
|
1538
|
-
|
|
1539
|
-
const
|
|
1981
|
+
const blocks = [];
|
|
1982
|
+
const lastCheckpointNumber = from + checkpointsToUnwind - 1;
|
|
1983
|
+
for (let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++) {
|
|
1984
|
+
const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
|
|
1985
|
+
if (!blocksForCheckpoint) {
|
|
1986
|
+
continue;
|
|
1987
|
+
}
|
|
1988
|
+
blocks.push(...blocksForCheckpoint);
|
|
1989
|
+
}
|
|
1540
1990
|
|
|
1541
1991
|
const opResults = await Promise.all([
|
|
1992
|
+
// Prune rolls back to the last proven block, which is by definition valid
|
|
1993
|
+
this.store.setPendingChainValidationStatus({ valid: true }),
|
|
1542
1994
|
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
1543
1995
|
...blocks.map(async block => {
|
|
1544
|
-
const contractClassLogs = block.
|
|
1996
|
+
const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
|
|
1545
1997
|
// ContractInstancePublished event logs are broadcast in privateLogs.
|
|
1546
|
-
const privateLogs = block.
|
|
1547
|
-
const publicLogs = block.
|
|
1998
|
+
const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
|
|
1999
|
+
const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
|
|
1548
2000
|
|
|
1549
2001
|
return (
|
|
1550
2002
|
await Promise.all([
|
|
1551
|
-
this.#updatePublishedContractClasses(contractClassLogs, block.
|
|
1552
|
-
this.#updateDeployedContractInstances(privateLogs, block.
|
|
1553
|
-
this.#updateUpdatedContractInstances(
|
|
1554
|
-
publicLogs,
|
|
1555
|
-
block.block.header.globalVariables.timestamp,
|
|
1556
|
-
Operation.Delete,
|
|
1557
|
-
),
|
|
2003
|
+
this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete),
|
|
2004
|
+
this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Delete),
|
|
2005
|
+
this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete),
|
|
1558
2006
|
])
|
|
1559
2007
|
).every(Boolean);
|
|
1560
2008
|
}),
|
|
1561
2009
|
|
|
1562
|
-
this.store.deleteLogs(blocks
|
|
1563
|
-
this.store.
|
|
2010
|
+
this.store.deleteLogs(blocks),
|
|
2011
|
+
this.store.unwindCheckpoints(from, checkpointsToUnwind),
|
|
1564
2012
|
]);
|
|
1565
2013
|
|
|
1566
2014
|
return opResults.every(Boolean);
|
|
1567
2015
|
}
|
|
1568
2016
|
|
|
1569
|
-
|
|
1570
|
-
return this.store.
|
|
2017
|
+
getCheckpointData(checkpointNumber: CheckpointNumber): Promise<CheckpointData | undefined> {
|
|
2018
|
+
return this.store.getCheckpointData(checkpointNumber);
|
|
2019
|
+
}
|
|
2020
|
+
|
|
2021
|
+
getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]> {
|
|
2022
|
+
return this.store.getRangeOfCheckpoints(from, limit);
|
|
2023
|
+
}
|
|
2024
|
+
|
|
2025
|
+
getCheckpointedL2BlockNumber(): Promise<BlockNumber> {
|
|
2026
|
+
return this.store.getCheckpointedL2BlockNumber();
|
|
2027
|
+
}
|
|
2028
|
+
getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
|
|
2029
|
+
return this.store.getSynchedCheckpointNumber();
|
|
1571
2030
|
}
|
|
1572
|
-
|
|
1573
|
-
return this.store.
|
|
2031
|
+
setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
|
|
2032
|
+
return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
|
|
1574
2033
|
}
|
|
1575
|
-
|
|
2034
|
+
getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
|
|
2035
|
+
return this.store.getCheckpointedBlock(number);
|
|
2036
|
+
}
|
|
2037
|
+
getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
2038
|
+
return this.store.getCheckpointedBlockByHash(blockHash);
|
|
2039
|
+
}
|
|
2040
|
+
getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
2041
|
+
return this.store.getCheckpointedBlockByArchive(archive);
|
|
2042
|
+
}
|
|
2043
|
+
getBlockHeaders(from: BlockNumber, limit: number): Promise<BlockHeader[]> {
|
|
1576
2044
|
return this.store.getBlockHeaders(from, limit);
|
|
1577
2045
|
}
|
|
2046
|
+
getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
|
|
2047
|
+
return this.store.getBlockHeaderByHash(blockHash);
|
|
2048
|
+
}
|
|
2049
|
+
getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
|
|
2050
|
+
return this.store.getBlockHeaderByArchive(archive);
|
|
2051
|
+
}
|
|
2052
|
+
getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined> {
|
|
2053
|
+
return this.store.getBlockByHash(blockHash);
|
|
2054
|
+
}
|
|
2055
|
+
getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined> {
|
|
2056
|
+
return this.store.getBlockByArchive(archive);
|
|
2057
|
+
}
|
|
2058
|
+
getLatestBlockNumber(): Promise<BlockNumber> {
|
|
2059
|
+
return this.store.getLatestBlockNumber();
|
|
2060
|
+
}
|
|
2061
|
+
getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined> {
|
|
2062
|
+
return this.store.getBlocksForCheckpoint(checkpointNumber);
|
|
2063
|
+
}
|
|
1578
2064
|
getTxEffect(txHash: TxHash): Promise<IndexedTxEffect | undefined> {
|
|
1579
2065
|
return this.store.getTxEffect(txHash);
|
|
1580
2066
|
}
|
|
@@ -1584,17 +2070,17 @@ export class ArchiverStoreHelper
|
|
|
1584
2070
|
addL1ToL2Messages(messages: InboxMessage[]): Promise<void> {
|
|
1585
2071
|
return this.store.addL1ToL2Messages(messages);
|
|
1586
2072
|
}
|
|
1587
|
-
getL1ToL2Messages(
|
|
1588
|
-
return this.store.getL1ToL2Messages(
|
|
2073
|
+
getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
|
|
2074
|
+
return this.store.getL1ToL2Messages(checkpointNumber);
|
|
1589
2075
|
}
|
|
1590
2076
|
getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
|
|
1591
2077
|
return this.store.getL1ToL2MessageIndex(l1ToL2Message);
|
|
1592
2078
|
}
|
|
1593
|
-
|
|
1594
|
-
return this.store.
|
|
2079
|
+
getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
2080
|
+
return this.store.getPrivateLogsByTags(tags);
|
|
1595
2081
|
}
|
|
1596
|
-
|
|
1597
|
-
return this.store.
|
|
2082
|
+
getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
|
|
2083
|
+
return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
1598
2084
|
}
|
|
1599
2085
|
getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
|
|
1600
2086
|
return this.store.getPublicLogs(filter);
|
|
@@ -1602,17 +2088,20 @@ export class ArchiverStoreHelper
|
|
|
1602
2088
|
getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
|
|
1603
2089
|
return this.store.getContractClassLogs(filter);
|
|
1604
2090
|
}
|
|
1605
|
-
getSynchedL2BlockNumber(): Promise<
|
|
1606
|
-
return this.store.
|
|
2091
|
+
getSynchedL2BlockNumber(): Promise<BlockNumber> {
|
|
2092
|
+
return this.store.getCheckpointedL2BlockNumber();
|
|
2093
|
+
}
|
|
2094
|
+
getProvenCheckpointNumber(): Promise<CheckpointNumber> {
|
|
2095
|
+
return this.store.getProvenCheckpointNumber();
|
|
1607
2096
|
}
|
|
1608
|
-
|
|
1609
|
-
return this.store.
|
|
2097
|
+
getProvenBlockNumber(): Promise<BlockNumber> {
|
|
2098
|
+
return this.store.getProvenBlockNumber();
|
|
1610
2099
|
}
|
|
1611
|
-
|
|
1612
|
-
return this.store.
|
|
2100
|
+
setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
|
|
2101
|
+
return this.store.setProvenCheckpointNumber(checkpointNumber);
|
|
1613
2102
|
}
|
|
1614
2103
|
setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
|
|
1615
|
-
return this.store.
|
|
2104
|
+
return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
|
|
1616
2105
|
}
|
|
1617
2106
|
setMessageSynchedL1Block(l1Block: L1BlockId): Promise<void> {
|
|
1618
2107
|
return this.store.setMessageSynchedL1Block(l1Block);
|
|
@@ -1644,8 +2133,8 @@ export class ArchiverStoreHelper
|
|
|
1644
2133
|
estimateSize(): Promise<{ mappingSize: number; physicalFileSize: number; actualSize: number; numItems: number }> {
|
|
1645
2134
|
return this.store.estimateSize();
|
|
1646
2135
|
}
|
|
1647
|
-
|
|
1648
|
-
return this.store.
|
|
2136
|
+
rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void> {
|
|
2137
|
+
return this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
|
|
1649
2138
|
}
|
|
1650
2139
|
iterateL1ToL2Messages(range: CustomRange<bigint> = {}): AsyncIterableIterator<InboxMessage> {
|
|
1651
2140
|
return this.store.iterateL1ToL2Messages(range);
|
|
@@ -1656,4 +2145,11 @@ export class ArchiverStoreHelper
|
|
|
1656
2145
|
getLastL1ToL2Message(): Promise<InboxMessage | undefined> {
|
|
1657
2146
|
return this.store.getLastL1ToL2Message();
|
|
1658
2147
|
}
|
|
2148
|
+
getPendingChainValidationStatus(): Promise<ValidateBlockResult | undefined> {
|
|
2149
|
+
return this.store.getPendingChainValidationStatus();
|
|
2150
|
+
}
|
|
2151
|
+
setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void> {
|
|
2152
|
+
this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
|
|
2153
|
+
return this.store.setPendingChainValidationStatus(status);
|
|
2154
|
+
}
|
|
1659
2155
|
}
|
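The new epoch queries introduced in the Archiver diff above (getBlocksForEpoch, getBlockHeadersForEpoch, getCheckpointsForEpoch) share one pattern: start at the latest synched checkpoint and step backwards while the checkpoint's slot is still at or after the epoch's first slot, collecting anything whose slot falls inside the epoch's slot range. The following is a minimal, self-contained sketch of that walk; CheckpointLike, slotRangeForEpoch and the in-memory store are hypothetical stand-ins for illustration only, not the package's actual types or API.

// Hypothetical stand-in for the package's checkpoint data as seen by the walk.
interface CheckpointLike {
  checkpointNumber: number;
  slotNumber: number;
}

// Assumed epoch layout for the sketch: `epochDuration` consecutive slots per epoch.
function slotRangeForEpoch(epochNumber: number, epochDuration: number): [number, number] {
  const start = epochNumber * epochDuration;
  return [start, start + epochDuration - 1];
}

async function checkpointsForEpoch(
  getCheckpoint: (n: number) => Promise<CheckpointLike | undefined>,
  latestCheckpointNumber: number,
  epochNumber: number,
  epochDuration: number,
): Promise<CheckpointLike[]> {
  const [start, end] = slotRangeForEpoch(epochNumber, epochDuration);
  const found: CheckpointLike[] = [];
  let checkpoint = await getCheckpoint(latestCheckpointNumber);
  // Walk backwards; stop as soon as the slot drops below the epoch's first slot.
  while (checkpoint && checkpoint.slotNumber >= start) {
    if (checkpoint.slotNumber <= end) {
      found.push(checkpoint);
    }
    checkpoint = await getCheckpoint(checkpoint.checkpointNumber - 1);
  }
  // Collected newest-first, so reverse to return them in ascending order.
  return found.reverse();
}

// Example usage against an in-memory store with 3 checkpoints and 32-slot epochs.
const demo = new Map<number, CheckpointLike>([
  [1, { checkpointNumber: 1, slotNumber: 30 }],
  [2, { checkpointNumber: 2, slotNumber: 33 }],
  [3, { checkpointNumber: 3, slotNumber: 40 }],
]);
checkpointsForEpoch(n => Promise.resolve(demo.get(n)), 3, 1, 32).then(cs => {
  // Prints [ 2, 3 ]: slots 33 and 40 fall in epoch 1 (slots 32..63); slot 30 does not.
  console.log(cs.map(c => c.checkpointNumber));
});

Because the walk terminates at the first checkpoint older than the epoch's start, it only touches checkpoints near the chain tip, which is why the code comments above note that no index is needed for recent epochs.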