@aztec/archiver 4.0.0-nightly.20260113 → 4.0.0-nightly.20260115
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +139 -22
- package/dest/archiver.d.ts +134 -0
- package/dest/archiver.d.ts.map +1 -0
- package/dest/archiver.js +767 -0
- package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
- package/dest/config.d.ts.map +1 -0
- package/dest/{archiver/config.js → config.js} +9 -0
- package/dest/{archiver/errors.d.ts → errors.d.ts} +1 -1
- package/dest/errors.d.ts.map +1 -0
- package/dest/factory.d.ts +5 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +82 -5
- package/dest/index.d.ts +10 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +8 -3
- package/dest/interfaces.d.ts +9 -0
- package/dest/interfaces.d.ts.map +1 -0
- package/dest/interfaces.js +3 -0
- package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
- package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/calldata_retriever.d.ts +2 -2
- package/dest/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/l1/data_retrieval.d.ts +88 -0
- package/dest/l1/data_retrieval.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/data_retrieval.js +32 -51
- package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
- package/dest/l1/debug_tx.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +1 -1
- package/dest/l1/spire_proposer.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
- package/dest/l1/trace_tx.d.ts.map +1 -0
- package/dest/l1/types.d.ts +12 -0
- package/dest/l1/types.d.ts.map +1 -0
- package/dest/{archiver/l1 → l1}/validate_trace.d.ts +1 -1
- package/dest/l1/validate_trace.d.ts.map +1 -0
- package/dest/modules/data_source_base.d.ts +83 -0
- package/dest/modules/data_source_base.d.ts.map +1 -0
- package/dest/modules/data_source_base.js +301 -0
- package/dest/modules/data_store_updater.d.ts +46 -0
- package/dest/modules/data_store_updater.d.ts.map +1 -0
- package/dest/modules/data_store_updater.js +216 -0
- package/dest/modules/instrumentation.d.ts +37 -0
- package/dest/modules/instrumentation.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.d.ts +67 -0
- package/dest/modules/l1_synchronizer.d.ts.map +1 -0
- package/dest/modules/l1_synchronizer.js +1064 -0
- package/dest/{archiver → modules}/validation.d.ts +1 -1
- package/dest/modules/validation.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/block_store.d.ts +2 -2
- package/dest/store/block_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/block_store.js +1 -1
- package/dest/store/contract_class_store.d.ts +18 -0
- package/dest/store/contract_class_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +1 -1
- package/dest/store/contract_instance_store.d.ts +24 -0
- package/dest/store/contract_instance_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
- package/dest/{archiver/archiver_store.d.ts → store/kv_archiver_store.d.ts} +143 -139
- package/dest/store/kv_archiver_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/kv_archiver_store.js +157 -49
- package/dest/{archiver/kv_archiver_store → store}/log_store.d.ts +1 -1
- package/dest/store/log_store.d.ts.map +1 -0
- package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
- package/dest/store/message_store.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
- package/dest/structs/data_retrieval.d.ts.map +1 -0
- package/dest/structs/inbox_message.d.ts +15 -0
- package/dest/structs/inbox_message.d.ts.map +1 -0
- package/dest/{archiver/structs → structs}/published.d.ts +1 -1
- package/dest/structs/published.d.ts.map +1 -0
- package/dest/test/fake_l1_state.d.ts +173 -0
- package/dest/test/fake_l1_state.d.ts.map +1 -0
- package/dest/test/fake_l1_state.js +364 -0
- package/dest/test/index.d.ts +2 -1
- package/dest/test/index.d.ts.map +1 -1
- package/dest/test/index.js +1 -0
- package/dest/test/mock_structs.d.ts +76 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +133 -2
- package/package.json +15 -17
- package/src/archiver.ts +522 -0
- package/src/{archiver/config.ts → config.ts} +11 -0
- package/src/factory.ts +118 -6
- package/src/index.ts +10 -3
- package/src/interfaces.ts +9 -0
- package/src/{archiver/l1 → l1}/calldata_retriever.ts +1 -1
- package/src/{archiver/l1 → l1}/data_retrieval.ts +52 -69
- package/src/modules/data_source_base.ts +439 -0
- package/src/modules/data_store_updater.ts +318 -0
- package/src/modules/l1_synchronizer.ts +870 -0
- package/src/{archiver/kv_archiver_store → store}/block_store.ts +1 -1
- package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +1 -1
- package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +1 -1
- package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +170 -8
- package/src/test/fake_l1_state.ts +561 -0
- package/src/test/index.ts +1 -0
- package/src/test/mock_structs.ts +247 -2
- package/dest/archiver/archiver.d.ts +0 -307
- package/dest/archiver/archiver.d.ts.map +0 -1
- package/dest/archiver/archiver.js +0 -2102
- package/dest/archiver/archiver_store.d.ts.map +0 -1
- package/dest/archiver/archiver_store.js +0 -4
- package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
- package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
- package/dest/archiver/archiver_store_test_suite.js +0 -2770
- package/dest/archiver/config.d.ts.map +0 -1
- package/dest/archiver/errors.d.ts.map +0 -1
- package/dest/archiver/index.d.ts +0 -7
- package/dest/archiver/index.d.ts.map +0 -1
- package/dest/archiver/index.js +0 -4
- package/dest/archiver/instrumentation.d.ts +0 -37
- package/dest/archiver/instrumentation.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -159
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
- package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
- package/dest/archiver/l1/data_retrieval.d.ts +0 -90
- package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
- package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
- package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
- package/dest/archiver/l1/types.d.ts +0 -12
- package/dest/archiver/l1/types.d.ts.map +0 -1
- package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
- package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/structs/inbox_message.d.ts +0 -15
- package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
- package/dest/archiver/structs/published.d.ts.map +0 -1
- package/dest/archiver/validation.d.ts.map +0 -1
- package/dest/rpc/index.d.ts +0 -9
- package/dest/rpc/index.d.ts.map +0 -1
- package/dest/rpc/index.js +0 -15
- package/src/archiver/archiver.ts +0 -2265
- package/src/archiver/archiver_store.ts +0 -380
- package/src/archiver/archiver_store_test_suite.ts +0 -2842
- package/src/archiver/index.ts +0 -6
- package/src/rpc/index.ts +0 -16
- /package/dest/{archiver/errors.js → errors.js} +0 -0
- /package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +0 -0
- /package/dest/{archiver/l1 → l1}/calldata_retriever.js +0 -0
- /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/spire_proposer.js +0 -0
- /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
- /package/dest/{archiver/l1 → l1}/types.js +0 -0
- /package/dest/{archiver/l1 → l1}/validate_trace.js +0 -0
- /package/dest/{archiver → modules}/instrumentation.js +0 -0
- /package/dest/{archiver → modules}/validation.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/log_store.js +0 -0
- /package/dest/{archiver/kv_archiver_store → store}/message_store.js +0 -0
- /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
- /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
- /package/dest/{archiver/structs → structs}/published.js +0 -0
- /package/src/{archiver/errors.ts → errors.ts} +0 -0
- /package/src/{archiver/l1 → l1}/README.md +0 -0
- /package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +0 -0
- /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/spire_proposer.ts +0 -0
- /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
- /package/src/{archiver/l1 → l1}/types.ts +0 -0
- /package/src/{archiver/l1 → l1}/validate_trace.ts +0 -0
- /package/src/{archiver → modules}/instrumentation.ts +0 -0
- /package/src/{archiver → modules}/validation.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/log_store.ts +0 -0
- /package/src/{archiver/kv_archiver_store → store}/message_store.ts +0 -0
- /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
- /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
- /package/src/{archiver/structs → structs}/published.ts +0 -0
package/src/archiver/archiver.ts
DELETED
|
@@ -1,2265 +0,0 @@
|
|
|
1
|
-
import type { BlobClientInterface } from '@aztec/blob-client/client';
|
|
2
|
-
import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
|
|
3
|
-
import { EpochCache } from '@aztec/epoch-cache';
|
|
4
|
-
import { createEthereumChain } from '@aztec/ethereum/chain';
|
|
5
|
-
import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
|
|
6
|
-
import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
|
|
7
|
-
import type { L1BlockId } from '@aztec/ethereum/l1-types';
|
|
8
|
-
import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
|
|
9
|
-
import { maxBigint } from '@aztec/foundation/bigint';
|
|
10
|
-
import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
|
|
11
|
-
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
12
|
-
import { merge, pick } from '@aztec/foundation/collection';
|
|
13
|
-
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
14
|
-
import type { EthAddress } from '@aztec/foundation/eth-address';
|
|
15
|
-
import { type Logger, createLogger } from '@aztec/foundation/log';
|
|
16
|
-
import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise';
|
|
17
|
-
import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
|
|
18
|
-
import { count } from '@aztec/foundation/string';
|
|
19
|
-
import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
|
|
20
|
-
import { isDefined } from '@aztec/foundation/types';
|
|
21
|
-
import type { CustomRange } from '@aztec/kv-store';
|
|
22
|
-
import { RollupAbi } from '@aztec/l1-artifacts';
|
|
23
|
-
import {
|
|
24
|
-
ContractClassPublishedEvent,
|
|
25
|
-
PrivateFunctionBroadcastedEvent,
|
|
26
|
-
UtilityFunctionBroadcastedEvent,
|
|
27
|
-
} from '@aztec/protocol-contracts/class-registry';
|
|
28
|
-
import {
|
|
29
|
-
ContractInstancePublishedEvent,
|
|
30
|
-
ContractInstanceUpdatedEvent,
|
|
31
|
-
} from '@aztec/protocol-contracts/instance-registry';
|
|
32
|
-
import type { FunctionSelector } from '@aztec/stdlib/abi';
|
|
33
|
-
import type { AztecAddress } from '@aztec/stdlib/aztec-address';
|
|
34
|
-
import {
|
|
35
|
-
type ArchiverEmitter,
|
|
36
|
-
type CheckpointId,
|
|
37
|
-
CheckpointedL2Block,
|
|
38
|
-
CommitteeAttestation,
|
|
39
|
-
GENESIS_CHECKPOINT_HEADER_HASH,
|
|
40
|
-
L2Block,
|
|
41
|
-
L2BlockNew,
|
|
42
|
-
type L2BlockSink,
|
|
43
|
-
type L2BlockSource,
|
|
44
|
-
L2BlockSourceEvents,
|
|
45
|
-
type L2Tips,
|
|
46
|
-
PublishedL2Block,
|
|
47
|
-
} from '@aztec/stdlib/block';
|
|
48
|
-
import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
|
|
49
|
-
import {
|
|
50
|
-
type ContractClassPublic,
|
|
51
|
-
type ContractDataSource,
|
|
52
|
-
type ContractInstanceWithAddress,
|
|
53
|
-
type ExecutablePrivateFunctionWithMembershipProof,
|
|
54
|
-
type UtilityFunctionWithMembershipProof,
|
|
55
|
-
computePublicBytecodeCommitment,
|
|
56
|
-
isValidPrivateFunctionMembershipProof,
|
|
57
|
-
isValidUtilityFunctionMembershipProof,
|
|
58
|
-
} from '@aztec/stdlib/contract';
|
|
59
|
-
import {
|
|
60
|
-
type L1RollupConstants,
|
|
61
|
-
getEpochAtSlot,
|
|
62
|
-
getEpochNumberAtTimestamp,
|
|
63
|
-
getSlotAtTimestamp,
|
|
64
|
-
getSlotRangeForEpoch,
|
|
65
|
-
getTimestampRangeForEpoch,
|
|
66
|
-
} from '@aztec/stdlib/epoch-helpers';
|
|
67
|
-
import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
|
|
68
|
-
import type { L2LogsSource } from '@aztec/stdlib/interfaces/server';
|
|
69
|
-
import {
|
|
70
|
-
ContractClassLog,
|
|
71
|
-
type LogFilter,
|
|
72
|
-
type PrivateLog,
|
|
73
|
-
type PublicLog,
|
|
74
|
-
type SiloedTag,
|
|
75
|
-
Tag,
|
|
76
|
-
TxScopedL2Log,
|
|
77
|
-
} from '@aztec/stdlib/logs';
|
|
78
|
-
import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
|
|
79
|
-
import type { CheckpointHeader } from '@aztec/stdlib/rollup';
|
|
80
|
-
import { type BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx';
|
|
81
|
-
import type { UInt64 } from '@aztec/stdlib/types';
|
|
82
|
-
import {
|
|
83
|
-
type TelemetryClient,
|
|
84
|
-
type Traceable,
|
|
85
|
-
type Tracer,
|
|
86
|
-
execInSpan,
|
|
87
|
-
getTelemetryClient,
|
|
88
|
-
trackSpan,
|
|
89
|
-
} from '@aztec/telemetry-client';
|
|
90
|
-
|
|
91
|
-
import { EventEmitter } from 'events';
|
|
92
|
-
import groupBy from 'lodash.groupby';
|
|
93
|
-
import { type GetContractReturnType, type Hex, createPublicClient, fallback, http } from 'viem';
|
|
94
|
-
|
|
95
|
-
import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
|
|
96
|
-
import type { ArchiverConfig } from './config.js';
|
|
97
|
-
import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
|
|
98
|
-
import { ArchiverInstrumentation } from './instrumentation.js';
|
|
99
|
-
import type { CheckpointData } from './kv_archiver_store/block_store.js';
|
|
100
|
-
import {
|
|
101
|
-
retrieveCheckpointsFromRollup,
|
|
102
|
-
retrieveL1ToL2Message,
|
|
103
|
-
retrieveL1ToL2Messages,
|
|
104
|
-
retrievedToPublishedCheckpoint,
|
|
105
|
-
} from './l1/data_retrieval.js';
|
|
106
|
-
import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
|
|
107
|
-
import type { InboxMessage } from './structs/inbox_message.js';
|
|
108
|
-
import { type ValidateCheckpointResult, validateCheckpointAttestations } from './validation.js';
|
|
109
|
-
|
|
110
|
-
/**
|
|
111
|
-
* Helper interface to combine all sources this archiver implementation provides.
|
|
112
|
-
*/
|
|
113
|
-
export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource;
|
|
114
|
-
|
|
115
|
-
/** Request to add a block to the archiver, queued for processing by the sync loop. */
|
|
116
|
-
type AddBlockRequest = {
|
|
117
|
-
block: L2BlockNew;
|
|
118
|
-
resolve: () => void;
|
|
119
|
-
reject: (err: Error) => void;
|
|
120
|
-
};
|
|
121
|
-
|
|
122
|
-
export type ArchiverDeps = {
|
|
123
|
-
telemetry?: TelemetryClient;
|
|
124
|
-
blobClient: BlobClientInterface;
|
|
125
|
-
epochCache?: EpochCache;
|
|
126
|
-
dateProvider?: DateProvider;
|
|
127
|
-
};
|
|
128
|
-
|
|
129
|
-
function mapArchiverConfig(config: Partial<ArchiverConfig>) {
|
|
130
|
-
return {
|
|
131
|
-
pollingIntervalMs: config.archiverPollingIntervalMS,
|
|
132
|
-
batchSize: config.archiverBatchSize,
|
|
133
|
-
skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations,
|
|
134
|
-
maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
|
|
135
|
-
ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts,
|
|
136
|
-
};
|
|
137
|
-
}
|
|
138
|
-
|
|
139
|
-
type RollupStatus = {
|
|
140
|
-
provenCheckpointNumber: CheckpointNumber;
|
|
141
|
-
provenArchive: Hex;
|
|
142
|
-
pendingCheckpointNumber: CheckpointNumber;
|
|
143
|
-
pendingArchive: Hex;
|
|
144
|
-
validationResult: ValidateCheckpointResult | undefined;
|
|
145
|
-
lastRetrievedCheckpoint?: PublishedCheckpoint;
|
|
146
|
-
lastL1BlockWithCheckpoint?: bigint;
|
|
147
|
-
};
|
|
148
|
-
|
|
149
|
-
/**
|
|
150
|
-
* Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
|
|
151
|
-
* Responsible for handling robust L1 polling so that other components do not need to
|
|
152
|
-
* concern themselves with it.
|
|
153
|
-
*/
|
|
154
|
-
export class Archiver
|
|
155
|
-
extends (EventEmitter as new () => ArchiverEmitter)
|
|
156
|
-
implements ArchiveSource, L2BlockSink, Traceable
|
|
157
|
-
{
|
|
158
|
-
/** A loop in which we will be continually fetching new checkpoints. */
|
|
159
|
-
private runningPromise: RunningPromise;
|
|
160
|
-
|
|
161
|
-
private rollup: RollupContract;
|
|
162
|
-
private inbox: InboxContract;
|
|
163
|
-
|
|
164
|
-
private store: ArchiverStoreHelper;
|
|
165
|
-
|
|
166
|
-
private l1BlockNumber: bigint | undefined;
|
|
167
|
-
private l1Timestamp: bigint | undefined;
|
|
168
|
-
private initialSyncComplete: boolean = false;
|
|
169
|
-
private initialSyncPromise: PromiseWithResolvers<void>;
|
|
170
|
-
|
|
171
|
-
/** Queue of blocks to be added to the store, processed by the sync loop. */
|
|
172
|
-
private blockQueue: AddBlockRequest[] = [];
|
|
173
|
-
|
|
174
|
-
public readonly tracer: Tracer;
|
|
175
|
-
|
|
176
|
-
/**
|
|
177
|
-
* Creates a new instance of the Archiver.
|
|
178
|
-
* @param publicClient - A client for interacting with the Ethereum node.
|
|
179
|
-
* @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
|
|
180
|
-
* @param rollupAddress - Ethereum address of the rollup contract.
|
|
181
|
-
* @param inboxAddress - Ethereum address of the inbox contract.
|
|
182
|
-
* @param registryAddress - Ethereum address of the registry contract.
|
|
183
|
-
* @param pollingIntervalMs - The interval for polling for L1 logs (in milliseconds).
|
|
184
|
-
* @param store - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
|
|
185
|
-
* @param log - A logger.
|
|
186
|
-
*/
|
|
187
|
-
constructor(
|
|
188
|
-
private readonly publicClient: ViemPublicClient,
|
|
189
|
-
private readonly debugClient: ViemPublicDebugClient,
|
|
190
|
-
private readonly l1Addresses: Pick<
|
|
191
|
-
L1ContractAddresses,
|
|
192
|
-
'rollupAddress' | 'inboxAddress' | 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
|
|
193
|
-
> & { slashingProposerAddress: EthAddress },
|
|
194
|
-
readonly dataStore: ArchiverDataStore,
|
|
195
|
-
private config: {
|
|
196
|
-
pollingIntervalMs: number;
|
|
197
|
-
batchSize: number;
|
|
198
|
-
skipValidateCheckpointAttestations?: boolean;
|
|
199
|
-
maxAllowedEthClientDriftSeconds: number;
|
|
200
|
-
ethereumAllowNoDebugHosts?: boolean;
|
|
201
|
-
},
|
|
202
|
-
private readonly blobClient: BlobClientInterface,
|
|
203
|
-
private readonly epochCache: EpochCache,
|
|
204
|
-
private readonly dateProvider: DateProvider,
|
|
205
|
-
private readonly instrumentation: ArchiverInstrumentation,
|
|
206
|
-
private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
|
|
207
|
-
private readonly log: Logger = createLogger('archiver'),
|
|
208
|
-
) {
|
|
209
|
-
super();
|
|
210
|
-
|
|
211
|
-
this.tracer = instrumentation.tracer;
|
|
212
|
-
this.store = new ArchiverStoreHelper(dataStore);
|
|
213
|
-
|
|
214
|
-
this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
|
|
215
|
-
this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
|
|
216
|
-
this.initialSyncPromise = promiseWithResolvers();
|
|
217
|
-
|
|
218
|
-
// Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
|
|
219
|
-
// are done as fast as possible. This then gets updated once the initial sync completes.
|
|
220
|
-
this.runningPromise = new RunningPromise(
|
|
221
|
-
() => this.sync(),
|
|
222
|
-
this.log,
|
|
223
|
-
this.config.pollingIntervalMs / 10,
|
|
224
|
-
makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError),
|
|
225
|
-
);
|
|
226
|
-
}
|
|
227
|
-
|
|
228
|
-
/**
|
|
229
|
-
* Creates a new instance of the Archiver and blocks until it syncs from chain.
|
|
230
|
-
* @param config - The archiver's desired configuration.
|
|
231
|
-
* @param archiverStore - The backing store for the archiver.
|
|
232
|
-
* @param blockUntilSynced - If true, blocks until the archiver has fully synced.
|
|
233
|
-
* @returns - An instance of the archiver.
|
|
234
|
-
*/
|
|
235
|
-
public static async createAndSync(
|
|
236
|
-
config: ArchiverConfig,
|
|
237
|
-
archiverStore: ArchiverDataStore,
|
|
238
|
-
deps: ArchiverDeps,
|
|
239
|
-
blockUntilSynced = true,
|
|
240
|
-
): Promise<Archiver> {
|
|
241
|
-
const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
|
|
242
|
-
const publicClient = createPublicClient({
|
|
243
|
-
chain: chain.chainInfo,
|
|
244
|
-
transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))),
|
|
245
|
-
pollingInterval: config.viemPollingIntervalMS,
|
|
246
|
-
});
|
|
247
|
-
|
|
248
|
-
// Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
|
|
249
|
-
const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
|
|
250
|
-
const debugClient = createPublicClient({
|
|
251
|
-
chain: chain.chainInfo,
|
|
252
|
-
transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))),
|
|
253
|
-
pollingInterval: config.viemPollingIntervalMS,
|
|
254
|
-
}) as ViemPublicDebugClient;
|
|
255
|
-
|
|
256
|
-
const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
|
|
257
|
-
|
|
258
|
-
const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
|
|
259
|
-
await Promise.all([
|
|
260
|
-
rollup.getL1StartBlock(),
|
|
261
|
-
rollup.getL1GenesisTime(),
|
|
262
|
-
rollup.getProofSubmissionEpochs(),
|
|
263
|
-
rollup.getGenesisArchiveTreeRoot(),
|
|
264
|
-
rollup.getSlashingProposerAddress(),
|
|
265
|
-
] as const);
|
|
266
|
-
|
|
267
|
-
const l1StartBlockHash = await publicClient
|
|
268
|
-
.getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
|
|
269
|
-
.then(block => Buffer32.fromString(block.hash));
|
|
270
|
-
|
|
271
|
-
const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config;
|
|
272
|
-
|
|
273
|
-
const l1Constants = {
|
|
274
|
-
l1StartBlockHash,
|
|
275
|
-
l1StartBlock,
|
|
276
|
-
l1GenesisTime,
|
|
277
|
-
epochDuration,
|
|
278
|
-
slotDuration,
|
|
279
|
-
ethereumSlotDuration,
|
|
280
|
-
proofSubmissionEpochs: Number(proofSubmissionEpochs),
|
|
281
|
-
genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
|
|
282
|
-
};
|
|
283
|
-
|
|
284
|
-
const opts = merge(
|
|
285
|
-
{
|
|
286
|
-
pollingIntervalMs: 10_000,
|
|
287
|
-
batchSize: 100,
|
|
288
|
-
maxAllowedEthClientDriftSeconds: 300,
|
|
289
|
-
ethereumAllowNoDebugHosts: false,
|
|
290
|
-
},
|
|
291
|
-
mapArchiverConfig(config),
|
|
292
|
-
);
|
|
293
|
-
|
|
294
|
-
const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps));
|
|
295
|
-
const telemetry = deps.telemetry ?? getTelemetryClient();
|
|
296
|
-
|
|
297
|
-
const archiver = new Archiver(
|
|
298
|
-
publicClient,
|
|
299
|
-
debugClient,
|
|
300
|
-
{ ...config.l1Contracts, slashingProposerAddress },
|
|
301
|
-
archiverStore,
|
|
302
|
-
opts,
|
|
303
|
-
deps.blobClient,
|
|
304
|
-
epochCache,
|
|
305
|
-
deps.dateProvider ?? new DateProvider(),
|
|
306
|
-
await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()),
|
|
307
|
-
l1Constants,
|
|
308
|
-
);
|
|
309
|
-
await archiver.start(blockUntilSynced);
|
|
310
|
-
return archiver;
|
|
311
|
-
}
|
|
312
|
-
|
|
313
|
-
/** Updates archiver config */
|
|
314
|
-
public updateConfig(newConfig: Partial<ArchiverConfig>) {
|
|
315
|
-
this.config = merge(this.config, mapArchiverConfig(newConfig));
|
|
316
|
-
}
|
|
317
|
-
|
|
318
|
-
/**
|
|
319
|
-
* Starts sync process.
|
|
320
|
-
* @param blockUntilSynced - If true, blocks until the archiver has fully synced.
|
|
321
|
-
*/
|
|
322
|
-
public async start(blockUntilSynced: boolean): Promise<void> {
|
|
323
|
-
if (this.runningPromise.isRunning()) {
|
|
324
|
-
throw new Error('Archiver is already running');
|
|
325
|
-
}
|
|
326
|
-
|
|
327
|
-
await this.blobClient.testSources();
|
|
328
|
-
await this.testEthereumNodeSynced();
|
|
329
|
-
await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
|
|
330
|
-
|
|
331
|
-
// Log initial state for the archiver
|
|
332
|
-
const { l1StartBlock } = this.l1constants;
|
|
333
|
-
const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
|
|
334
|
-
const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
|
|
335
|
-
this.log.info(
|
|
336
|
-
`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`,
|
|
337
|
-
{ blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint },
|
|
338
|
-
);
|
|
339
|
-
|
|
340
|
-
// Start sync loop, and return the wait for initial sync if we are asked to block until synced
|
|
341
|
-
this.runningPromise.start();
|
|
342
|
-
if (blockUntilSynced) {
|
|
343
|
-
return this.waitForInitialSync();
|
|
344
|
-
}
|
|
345
|
-
}
|
|
346
|
-
|
|
347
|
-
public syncImmediate() {
|
|
348
|
-
return this.runningPromise.trigger();
|
|
349
|
-
}
|
|
350
|
-
|
|
351
|
-
/**
|
|
352
|
-
* Queues a block to be added to the archiver store and triggers processing.
|
|
353
|
-
* The block will be processed by the sync loop.
|
|
354
|
-
* Implements the L2BlockSink interface.
|
|
355
|
-
* @param block - The L2 block to add.
|
|
356
|
-
* @returns A promise that resolves when the block has been added to the store, or rejects on error.
|
|
357
|
-
*/
|
|
358
|
-
public addBlock(block: L2BlockNew): Promise<void> {
|
|
359
|
-
return new Promise<void>((resolve, reject) => {
|
|
360
|
-
this.blockQueue.push({ block, resolve, reject });
|
|
361
|
-
this.log.debug(`Queued block ${block.number} for processing`);
|
|
362
|
-
// Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
|
|
363
|
-
this.syncImmediate().catch(err => {
|
|
364
|
-
this.log.error(`Sync immediate call failed: ${err}`);
|
|
365
|
-
});
|
|
366
|
-
});
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
/**
|
|
370
|
-
* Processes all queued blocks, adding them to the store.
|
|
371
|
-
* Called at the beginning of each sync iteration.
|
|
372
|
-
* Blocks are processed in the order they were queued.
|
|
373
|
-
*/
|
|
374
|
-
private async processQueuedBlocks(): Promise<void> {
|
|
375
|
-
if (this.blockQueue.length === 0) {
|
|
376
|
-
return;
|
|
377
|
-
}
|
|
378
|
-
|
|
379
|
-
// Take all blocks from the queue
|
|
380
|
-
const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
|
|
381
|
-
this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
|
|
382
|
-
|
|
383
|
-
// Process each block individually to properly resolve/reject each promise
|
|
384
|
-
for (const { block, resolve, reject } of queuedItems) {
|
|
385
|
-
try {
|
|
386
|
-
await this.store.addBlocks([block]);
|
|
387
|
-
this.log.debug(`Added block ${block.number} to store`);
|
|
388
|
-
resolve();
|
|
389
|
-
} catch (err: any) {
|
|
390
|
-
this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
|
|
391
|
-
reject(err);
|
|
392
|
-
}
|
|
393
|
-
}
|
|
394
|
-
}
|
|
395
|
-
|
|
396
|
-
public waitForInitialSync() {
|
|
397
|
-
return this.initialSyncPromise.promise;
|
|
398
|
-
}
|
|
399
|
-
|
|
400
|
-
/** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */
|
|
401
|
-
private async testEthereumNodeSynced() {
|
|
402
|
-
const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
|
|
403
|
-
if (maxAllowedDelay === 0) {
|
|
404
|
-
return;
|
|
405
|
-
}
|
|
406
|
-
const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({ includeTransactions: false });
|
|
407
|
-
const currentTime = BigInt(this.dateProvider.nowInSeconds());
|
|
408
|
-
if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
|
|
409
|
-
throw new Error(
|
|
410
|
-
`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`,
|
|
411
|
-
);
|
|
412
|
-
}
|
|
413
|
-
}
|
|
414
|
-
|
|
415
|
-
  /**
   * Performs a single synchronization pass against L1: ingests new L1 to L2 messages, then
   * retrieves and processes new checkpoints, unwinds if a prune is predicted, and finally
   * advances the archiver's cached view of the L1 tip (block number and timestamp).
   */
  @trackSpan('Archiver.syncFromL1')
  private async syncFromL1() {
    /**
     * We keep track of three "pointers" to L1 blocks:
     * 1. the last L1 block that published an L2 block
     * 2. the last L1 block that added L1 to L2 messages
     * 3. the last L1 block that cancelled L1 to L2 messages
     *
     * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
     * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
     * The archiver will stay back, until there's data on L1 that will move the pointers forward.
     */
    const { l1StartBlock, l1StartBlockHash } = this.l1constants;
    // Syncpoints default to the rollup's L1 genesis when nothing has been stored yet.
    const {
      blocksSynchedTo = l1StartBlock,
      messagesSynchedTo = { l1BlockNumber: l1StartBlock, l1BlockHash: l1StartBlockHash },
    } = await this.store.getSynchPoint();

    const currentL1Block = await this.publicClient.getBlock({ includeTransactions: false });
    const currentL1BlockNumber = currentL1Block.number;
    const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);

    this.log.trace(`Starting new archiver sync iteration`, {
      blocksSynchedTo,
      messagesSynchedTo,
      currentL1BlockNumber,
      currentL1BlockHash,
    });

    // ********** Ensuring Consistency of data pulled from L1 **********

    /**
     * There are a number of calls in this sync operation to L1 for retrieving
     * events and transaction data. There are a couple of things we need to bear in mind
     * to ensure that data is read exactly once.
     *
     * The first is the problem of eventually consistent ETH service providers like Infura.
     * Each L1 read operation will query data from the last L1 block that it saw emit its kind of data.
     * (so pending L1 to L2 messages will read from the last L1 block that emitted a message and so on)
     * This will mean the archiver will lag behind L1 and will only advance when there's L2-relevant activity on the chain.
     *
     * The second is that in between the various calls to L1, the block number can move meaning some
     * of the following calls will return data for blocks that were not present during earlier calls.
     * To combat this for the time being we simply ensure that all data retrieval methods only retrieve
     * data up to the currentBlockNumber captured at the top of this function. We might want to improve on this
     * in future but for the time being it should give us the guarantees that we need
     */

    // ********** Events that are processed per L1 block **********
    await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber, currentL1BlockHash);

    // Get L1 timestamp for the current block
    // Reuse the cached timestamp when it corresponds to the same L1 block, saving one RPC call.
    const currentL1Timestamp =
      !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber
        ? (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp
        : this.l1Timestamp;

    // Warn if the latest L1 block timestamp is too old
    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
    const now = this.dateProvider.nowInSeconds();
    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
      this.log.warn(
        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
      );
    }

    // ********** Events that are processed per checkpoint **********
    if (currentL1BlockNumber > blocksSynchedTo) {
      // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
      // pending chain validation status, proven checkpoint number, and synched L1 block number.
      const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
      // Then we prune the current epoch if it'd reorg on next submission.
      // Note that we don't do this before retrieving checkpoints because we may need to retrieve
      // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
      // the chain locally before we start unwinding stuff. This can be optimized by figuring out
      // up to which point we're pruning, and then requesting checkpoints up to that point only.
      const { rollupCanPrune } = await this.handleEpochPrune(
        rollupStatus.provenCheckpointNumber,
        currentL1BlockNumber,
        currentL1Timestamp,
      );

      // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
      // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
      // we get a valid checkpoint to advance the syncpoint.
      if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
        await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
      }

      // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
      // We only do this if rollup cant prune on the next submission. Otherwise we will end up
      // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
      // since the archiver will rightfully refuse to sync up to it.
      if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
        await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
      }

      this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
    }

    // After syncing has completed, update the current l1 block number and timestamp,
    // otherwise we risk announcing to the world that we've synced to a given point,
    // but the corresponding blocks have not been processed (see #12631).
    this.l1Timestamp = currentL1Timestamp;
    this.l1BlockNumber = currentL1BlockNumber;

    // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
    // so if the initial sync took too long, we still go for another iteration.
    if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= (await this.publicClient.getBlockNumber())) {
      this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
        l1BlockNumber: currentL1BlockNumber,
        syncPoint: await this.store.getSynchPoint(),
        ...(await this.getL2Tips()),
      });
      // Once caught up, relax the polling cadence to the configured steady-state interval.
      this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
      this.initialSyncComplete = true;
      this.initialSyncPromise.resolve();
    }
  }
|
|
535
|
-
|
|
536
|
-
/**
|
|
537
|
-
* Fetches logs from L1 contracts and processes them.
|
|
538
|
-
*/
|
|
539
|
-
@trackSpan('Archiver.sync')
|
|
540
|
-
private async sync() {
|
|
541
|
-
// Process any queued blocks first, before doing L1 sync
|
|
542
|
-
await this.processQueuedBlocks();
|
|
543
|
-
// Now perform L1 sync
|
|
544
|
-
await this.syncFromL1();
|
|
545
|
-
}
|
|
546
|
-
|
|
547
|
-
/** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
|
|
548
|
-
private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint) {
|
|
549
|
-
const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
|
|
550
|
-
const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber });
|
|
551
|
-
if (result) {
|
|
552
|
-
this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
|
|
553
|
-
currentL1Timestamp,
|
|
554
|
-
pruneTime: time,
|
|
555
|
-
currentL1BlockNumber,
|
|
556
|
-
});
|
|
557
|
-
}
|
|
558
|
-
return result;
|
|
559
|
-
}
|
|
560
|
-
|
|
561
|
-
  /**
   * Checks if there'd be a reorg for the next checkpoint submission and start pruning now.
   *
   * If the rollup contract allows pruning on the next submission and we hold checkpoints past
   * the proven one, unwinds all local checkpoints back to the proven checkpoint. The affected
   * blocks are collected and an L2PruneDetected event is emitted BEFORE unwinding, so
   * listening services can react while the data is still available.
   *
   * @param provenCheckpointNumber - Latest proven checkpoint per the rollup contract.
   * @param currentL1BlockNumber - L1 block number captured at the top of this sync iteration.
   * @param currentL1Timestamp - Timestamp of that L1 block.
   * @returns `rollupCanPrune`: whether the contract reports a prune is possible next submission.
   */
  @trackSpan('Archiver.handleEpochPrune')
  private async handleEpochPrune(
    provenCheckpointNumber: CheckpointNumber,
    currentL1BlockNumber: bigint,
    currentL1Timestamp: bigint,
  ) {
    const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
    const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
    // Only unwind if we actually hold checkpoints beyond the proven one.
    const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;

    if (canPrune) {
      const timer = new Timer();
      // Everything after the proven checkpoint gets discarded.
      const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);

      const header = await this.getCheckpointHeader(pruneFrom);
      if (header === undefined) {
        throw new Error(`Missing checkpoint header ${pruneFrom}`);
      }

      const pruneFromSlotNumber = header.slotNumber;
      const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);

      const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;

      // Gather the data of every checkpoint about to be unwound so the prune event can carry the affected blocks.
      const checkpointPromises = Array.from({ length: checkpointsToUnwind })
        .fill(0)
        .map((_, i) => this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
      const checkpoints = await Promise.all(checkpointPromises);

      const blockPromises = await Promise.all(
        checkpoints
          .filter(isDefined)
          .map(cp => this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))),
      );
      const newBlocks = blockPromises.filter(isDefined).flat();

      // Emit an event for listening services to react to the chain prune
      this.emit(L2BlockSourceEvents.L2PruneDetected, {
        type: L2BlockSourceEvents.L2PruneDetected,
        epochNumber: pruneFromEpochNumber,
        blocks: newBlocks,
      });

      this.log.debug(
        `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
      );
      await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
      this.log.warn(
        `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
          `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
          `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
      );
      this.instrumentation.processPrune(timer.ms());
      // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
      // Seems like the next iteration should handle this.
      // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
    }

    return { rollupCanPrune };
  }
|
|
622
|
-
|
|
623
|
-
private nextRange(end: bigint, limit: bigint): [bigint, bigint] {
|
|
624
|
-
const batchSize = (this.config.batchSize * this.l1constants.slotDuration) / this.l1constants.ethereumSlotDuration;
|
|
625
|
-
const nextStart = end + 1n;
|
|
626
|
-
const nextEnd = nextStart + BigInt(batchSize);
|
|
627
|
-
if (nextEnd > limit) {
|
|
628
|
-
return [nextStart, limit];
|
|
629
|
-
}
|
|
630
|
-
return [nextStart, nextEnd];
|
|
631
|
-
}
|
|
632
|
-
|
|
633
|
-
@trackSpan('Archiver.handleL1ToL2Messages')
|
|
634
|
-
private async handleL1ToL2Messages(
|
|
635
|
-
messagesSyncPoint: L1BlockId,
|
|
636
|
-
currentL1BlockNumber: bigint,
|
|
637
|
-
_currentL1BlockHash: Buffer32,
|
|
638
|
-
) {
|
|
639
|
-
this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
|
|
640
|
-
if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
|
|
641
|
-
return;
|
|
642
|
-
}
|
|
643
|
-
|
|
644
|
-
// Load remote and local inbox states.
|
|
645
|
-
const localMessagesInserted = await this.store.getTotalL1ToL2MessageCount();
|
|
646
|
-
const localLastMessage = await this.store.getLastL1ToL2Message();
|
|
647
|
-
const remoteMessagesState = await this.inbox.getState({ blockNumber: currentL1BlockNumber });
|
|
648
|
-
|
|
649
|
-
this.log.trace(`Retrieved remote inbox state at L1 block ${currentL1BlockNumber}.`, {
|
|
650
|
-
localMessagesInserted,
|
|
651
|
-
localLastMessage,
|
|
652
|
-
remoteMessagesState,
|
|
653
|
-
});
|
|
654
|
-
|
|
655
|
-
// Compare message count and rolling hash. If they match, no need to retrieve anything.
|
|
656
|
-
if (
|
|
657
|
-
remoteMessagesState.totalMessagesInserted === localMessagesInserted &&
|
|
658
|
-
remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)
|
|
659
|
-
) {
|
|
660
|
-
this.log.trace(
|
|
661
|
-
`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`,
|
|
662
|
-
);
|
|
663
|
-
return;
|
|
664
|
-
}
|
|
665
|
-
|
|
666
|
-
// Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
|
|
667
|
-
// Note that we need to fetch it from logs and not from inbox state at the syncpoint l1 block number, since it
|
|
668
|
-
// could be older than 128 blocks and non-archive nodes cannot resolve it.
|
|
669
|
-
if (localLastMessage) {
|
|
670
|
-
const remoteLastMessage = await this.retrieveL1ToL2Message(localLastMessage.leaf);
|
|
671
|
-
this.log.trace(`Retrieved remote message for local last`, { remoteLastMessage, localLastMessage });
|
|
672
|
-
if (!remoteLastMessage || !remoteLastMessage.rollingHash.equals(localLastMessage.rollingHash)) {
|
|
673
|
-
this.log.warn(`Rolling back L1 to L2 messages due to hash mismatch or msg not found.`, {
|
|
674
|
-
remoteLastMessage,
|
|
675
|
-
messagesSyncPoint,
|
|
676
|
-
localLastMessage,
|
|
677
|
-
});
|
|
678
|
-
|
|
679
|
-
messagesSyncPoint = await this.rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint);
|
|
680
|
-
this.log.debug(`Rolled back L1 to L2 messages to L1 block ${messagesSyncPoint.l1BlockNumber}.`, {
|
|
681
|
-
messagesSyncPoint,
|
|
682
|
-
});
|
|
683
|
-
}
|
|
684
|
-
}
|
|
685
|
-
|
|
686
|
-
// Retrieve and save messages in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
|
|
687
|
-
let searchStartBlock: bigint = 0n;
|
|
688
|
-
let searchEndBlock: bigint = messagesSyncPoint.l1BlockNumber;
|
|
689
|
-
|
|
690
|
-
let lastMessage: InboxMessage | undefined;
|
|
691
|
-
let messageCount = 0;
|
|
692
|
-
|
|
693
|
-
do {
|
|
694
|
-
[searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
|
|
695
|
-
this.log.trace(`Retrieving L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`);
|
|
696
|
-
const messages = await retrieveL1ToL2Messages(this.inbox.getContract(), searchStartBlock, searchEndBlock);
|
|
697
|
-
this.log.verbose(
|
|
698
|
-
`Retrieved ${messages.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
|
|
699
|
-
);
|
|
700
|
-
const timer = new Timer();
|
|
701
|
-
await this.store.addL1ToL2Messages(messages);
|
|
702
|
-
const perMsg = timer.ms() / messages.length;
|
|
703
|
-
this.instrumentation.processNewMessages(messages.length, perMsg);
|
|
704
|
-
for (const msg of messages) {
|
|
705
|
-
this.log.debug(`Downloaded L1 to L2 message`, { ...msg, leaf: msg.leaf.toString() });
|
|
706
|
-
lastMessage = msg;
|
|
707
|
-
messageCount++;
|
|
708
|
-
}
|
|
709
|
-
} while (searchEndBlock < currentL1BlockNumber);
|
|
710
|
-
|
|
711
|
-
// Log stats for messages retrieved (if any).
|
|
712
|
-
if (messageCount > 0) {
|
|
713
|
-
this.log.info(
|
|
714
|
-
`Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`,
|
|
715
|
-
{ lastMessage, messageCount },
|
|
716
|
-
);
|
|
717
|
-
}
|
|
718
|
-
|
|
719
|
-
// Warn if the resulting rolling hash does not match the remote state we had retrieved.
|
|
720
|
-
if (lastMessage && !lastMessage.rollingHash.equals(remoteMessagesState.messagesRollingHash)) {
|
|
721
|
-
this.log.warn(`Last message retrieved rolling hash does not match remote state.`, {
|
|
722
|
-
lastMessage,
|
|
723
|
-
remoteMessagesState,
|
|
724
|
-
});
|
|
725
|
-
}
|
|
726
|
-
}
|
|
727
|
-
|
|
728
|
-
private async retrieveL1ToL2Message(leaf: Fr): Promise<InboxMessage | undefined> {
|
|
729
|
-
const currentL1BlockNumber = await this.publicClient.getBlockNumber();
|
|
730
|
-
let searchStartBlock: bigint = 0n;
|
|
731
|
-
let searchEndBlock: bigint = this.l1constants.l1StartBlock - 1n;
|
|
732
|
-
|
|
733
|
-
do {
|
|
734
|
-
[searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
|
|
735
|
-
|
|
736
|
-
const message = await retrieveL1ToL2Message(this.inbox.getContract(), leaf, searchStartBlock, searchEndBlock);
|
|
737
|
-
|
|
738
|
-
if (message) {
|
|
739
|
-
return message;
|
|
740
|
-
}
|
|
741
|
-
} while (searchEndBlock < currentL1BlockNumber);
|
|
742
|
-
|
|
743
|
-
return undefined;
|
|
744
|
-
}
|
|
745
|
-
|
|
746
|
-
  /**
   * Handles an L1 reorg affecting L1 to L2 messages: walks our stored messages backwards until
   * one still matches L1 (same rolling hash), deletes everything stored after it, and rewinds
   * the messages sync point so the caller's retrieval loop reprocesses from there.
   *
   * @param localLastMessage - Most recent locally stored message (already known to mismatch L1).
   * @param messagesSyncPoint - Current messages sync point, to be rewound.
   * @returns The rewound sync point, already persisted to the store.
   */
  private async rollbackL1ToL2Messages(localLastMessage: InboxMessage, messagesSyncPoint: L1BlockId) {
    // Slowly go back through our messages until we find the last common message.
    // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this
    // is a very rare case, so it's fine to query one log at a time.
    let commonMsg: undefined | InboxMessage;
    this.log.verbose(`Searching most recent common L1 to L2 message at or before index ${localLastMessage.index}`);
    for await (const msg of this.store.iterateL1ToL2Messages({ reverse: true, end: localLastMessage.index })) {
      const remoteMsg = await this.retrieveL1ToL2Message(msg.leaf);
      const logCtx = { remoteMsg, localMsg: msg };
      if (remoteMsg && remoteMsg.rollingHash.equals(msg.rollingHash)) {
        this.log.verbose(
          `Found most recent common L1 to L2 message at index ${msg.index} on L1 block ${msg.l1BlockNumber}`,
          logCtx,
        );
        commonMsg = remoteMsg;
        break;
      } else if (remoteMsg) {
        this.log.debug(`Local L1 to L2 message with index ${msg.index} has different rolling hash`, logCtx);
      } else {
        this.log.debug(`Local L1 to L2 message with index ${msg.index} not found on L1`, logCtx);
      }
    }

    // Delete everything after the common message we found.
    // If no common message exists, the entire local message set is discarded (removal from index 0).
    const lastGoodIndex = commonMsg?.index;
    this.log.warn(`Deleting all local L1 to L2 messages after index ${lastGoodIndex ?? 'undefined'}`);
    await this.store.removeL1ToL2Messages(lastGoodIndex !== undefined ? lastGoodIndex + 1n : 0n);

    // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
    // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
    // after the last common message.
    const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1constants.l1StartBlock;
    const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
    messagesSyncPoint = { l1BlockNumber: syncPointL1BlockNumber, l1BlockHash: syncPointL1BlockHash };
    await this.store.setMessageSynchedL1Block(messagesSyncPoint);
    return messagesSyncPoint;
  }
|
|
783
|
-
|
|
784
|
-
private async getL1BlockHash(l1BlockNumber: bigint): Promise<Buffer32> {
|
|
785
|
-
const block = await this.publicClient.getBlock({ blockNumber: l1BlockNumber, includeTransactions: false });
|
|
786
|
-
if (!block) {
|
|
787
|
-
throw new Error(`Missing L1 block ${l1BlockNumber}`);
|
|
788
|
-
}
|
|
789
|
-
return Buffer32.fromString(block.hash);
|
|
790
|
-
}
|
|
791
|
-
|
|
792
|
-
@trackSpan('Archiver.handleCheckpoints')
|
|
793
|
-
private async handleCheckpoints(blocksSynchedTo: bigint, currentL1BlockNumber: bigint): Promise<RollupStatus> {
|
|
794
|
-
const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
|
|
795
|
-
const initialValidationResult: ValidateCheckpointResult | undefined =
|
|
796
|
-
await this.store.getPendingChainValidationStatus();
|
|
797
|
-
const {
|
|
798
|
-
provenCheckpointNumber,
|
|
799
|
-
provenArchive,
|
|
800
|
-
pendingCheckpointNumber,
|
|
801
|
-
pendingArchive,
|
|
802
|
-
archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber,
|
|
803
|
-
} = await execInSpan(this.tracer, 'Archiver.getRollupStatus', () =>
|
|
804
|
-
this.rollup.status(localPendingCheckpointNumber, { blockNumber: currentL1BlockNumber }),
|
|
805
|
-
);
|
|
806
|
-
const rollupStatus: RollupStatus = {
|
|
807
|
-
provenCheckpointNumber,
|
|
808
|
-
provenArchive: provenArchive.toString(),
|
|
809
|
-
pendingCheckpointNumber,
|
|
810
|
-
pendingArchive: pendingArchive.toString(),
|
|
811
|
-
validationResult: initialValidationResult,
|
|
812
|
-
};
|
|
813
|
-
this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
|
|
814
|
-
localPendingCheckpointNumber,
|
|
815
|
-
blocksSynchedTo,
|
|
816
|
-
currentL1BlockNumber,
|
|
817
|
-
archiveForLocalPendingCheckpointNumber,
|
|
818
|
-
...rollupStatus,
|
|
819
|
-
});
|
|
820
|
-
|
|
821
|
-
const updateProvenCheckpoint = async () => {
|
|
822
|
-
// Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
|
|
823
|
-
// we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
|
|
824
|
-
// so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
|
|
825
|
-
if (provenCheckpointNumber === 0) {
|
|
826
|
-
const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
|
|
827
|
-
if (localProvenCheckpointNumber !== provenCheckpointNumber) {
|
|
828
|
-
await this.setProvenCheckpointNumber(provenCheckpointNumber);
|
|
829
|
-
this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
|
|
830
|
-
}
|
|
831
|
-
}
|
|
832
|
-
|
|
833
|
-
const localCheckpointForDestinationProvenCheckpointNumber =
|
|
834
|
-
await this.store.getCheckpointData(provenCheckpointNumber);
|
|
835
|
-
|
|
836
|
-
// Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
|
|
837
|
-
// synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
|
|
838
|
-
const synched = await this.getSynchedCheckpointNumber();
|
|
839
|
-
if (
|
|
840
|
-
localCheckpointForDestinationProvenCheckpointNumber &&
|
|
841
|
-
synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber
|
|
842
|
-
) {
|
|
843
|
-
this.log.error(
|
|
844
|
-
`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`,
|
|
845
|
-
);
|
|
846
|
-
}
|
|
847
|
-
|
|
848
|
-
this.log.trace(
|
|
849
|
-
`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${
|
|
850
|
-
localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'
|
|
851
|
-
}`,
|
|
852
|
-
);
|
|
853
|
-
|
|
854
|
-
if (
|
|
855
|
-
localCheckpointForDestinationProvenCheckpointNumber &&
|
|
856
|
-
provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)
|
|
857
|
-
) {
|
|
858
|
-
const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
|
|
859
|
-
if (localProvenCheckpointNumber !== provenCheckpointNumber) {
|
|
860
|
-
await this.setProvenCheckpointNumber(provenCheckpointNumber);
|
|
861
|
-
this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
|
|
862
|
-
const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
|
|
863
|
-
const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
|
|
864
|
-
const lastBlockNumberInCheckpoint =
|
|
865
|
-
localCheckpointForDestinationProvenCheckpointNumber.startBlock +
|
|
866
|
-
localCheckpointForDestinationProvenCheckpointNumber.numBlocks -
|
|
867
|
-
1;
|
|
868
|
-
|
|
869
|
-
this.emit(L2BlockSourceEvents.L2BlockProven, {
|
|
870
|
-
type: L2BlockSourceEvents.L2BlockProven,
|
|
871
|
-
blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
|
|
872
|
-
slotNumber: provenSlotNumber,
|
|
873
|
-
epochNumber: provenEpochNumber,
|
|
874
|
-
});
|
|
875
|
-
this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
|
|
876
|
-
} else {
|
|
877
|
-
this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
|
|
878
|
-
}
|
|
879
|
-
}
|
|
880
|
-
};
|
|
881
|
-
|
|
882
|
-
// This is an edge case that we only hit if there are no proposed checkpoints.
|
|
883
|
-
// If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
|
|
884
|
-
const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
|
|
885
|
-
if (noCheckpoints) {
|
|
886
|
-
await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
|
|
887
|
-
this.log.debug(
|
|
888
|
-
`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`,
|
|
889
|
-
);
|
|
890
|
-
return rollupStatus;
|
|
891
|
-
}
|
|
892
|
-
|
|
893
|
-
await updateProvenCheckpoint();
|
|
894
|
-
|
|
895
|
-
// Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
|
|
896
|
-
// are any state that could be impacted by it. If we have no checkpoints, there is no impact.
|
|
897
|
-
if (localPendingCheckpointNumber > 0) {
|
|
898
|
-
const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
|
|
899
|
-
if (localPendingCheckpoint === undefined) {
|
|
900
|
-
throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
|
|
901
|
-
}
|
|
902
|
-
|
|
903
|
-
const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
|
|
904
|
-
const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
|
|
905
|
-
if (noCheckpointSinceLast) {
|
|
906
|
-
// We believe the following line causes a problem when we encounter L1 re-orgs.
|
|
907
|
-
// Basically, by setting the synched L1 block number here, we are saying that we have
|
|
908
|
-
// processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
|
|
909
|
-
// this block again (or any blocks before).
|
|
910
|
-
// However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
|
|
911
|
-
// We must only set this block number based on actually retrieved logs.
|
|
912
|
-
// TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
|
|
913
|
-
// await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
|
|
914
|
-
this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
|
|
915
|
-
return rollupStatus;
|
|
916
|
-
}
|
|
917
|
-
|
|
918
|
-
const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(
|
|
919
|
-
localPendingCheckpoint.archive.root,
|
|
920
|
-
);
|
|
921
|
-
if (!localPendingCheckpointInChain) {
|
|
922
|
-
// If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
|
|
923
|
-
// or the L1 have reorged.
|
|
924
|
-
// In any case, we have to figure out how far into the past the action will take us.
|
|
925
|
-
// For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
|
|
926
|
-
this.log.debug(
|
|
927
|
-
`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`,
|
|
928
|
-
{ localPendingCheckpointNumber, localPendingArchiveRoot, archiveForLocalPendingCheckpointNumber },
|
|
929
|
-
);
|
|
930
|
-
|
|
931
|
-
let tipAfterUnwind = localPendingCheckpointNumber;
|
|
932
|
-
while (true) {
|
|
933
|
-
const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
|
|
934
|
-
if (candidateCheckpoint === undefined) {
|
|
935
|
-
break;
|
|
936
|
-
}
|
|
937
|
-
|
|
938
|
-
const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
|
|
939
|
-
this.log.trace(
|
|
940
|
-
`Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`,
|
|
941
|
-
{
|
|
942
|
-
archiveAtContract,
|
|
943
|
-
archiveLocal: candidateCheckpoint.archive.root.toString(),
|
|
944
|
-
},
|
|
945
|
-
);
|
|
946
|
-
if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
|
|
947
|
-
break;
|
|
948
|
-
}
|
|
949
|
-
tipAfterUnwind--;
|
|
950
|
-
}
|
|
951
|
-
|
|
952
|
-
const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
|
|
953
|
-
await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
|
|
954
|
-
|
|
955
|
-
this.log.warn(
|
|
956
|
-
`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
|
|
957
|
-
`due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` +
|
|
958
|
-
`Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
|
|
959
|
-
);
|
|
960
|
-
}
|
|
961
|
-
}
|
|
962
|
-
|
|
963
|
-
// Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
|
|
964
|
-
// computed using the L2 block time vs the L1 block time.
|
|
965
|
-
let searchStartBlock: bigint = blocksSynchedTo;
|
|
966
|
-
let searchEndBlock: bigint = blocksSynchedTo;
|
|
967
|
-
let lastRetrievedCheckpoint: PublishedCheckpoint | undefined;
|
|
968
|
-
let lastL1BlockWithCheckpoint: bigint | undefined = undefined;
|
|
969
|
-
|
|
970
|
-
do {
|
|
971
|
-
[searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
|
|
972
|
-
|
|
973
|
-
this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
|
|
974
|
-
|
|
975
|
-
// TODO(md): Retrieve from blob client then from consensus client, then from peers
|
|
976
|
-
const retrievedCheckpoints = await execInSpan(this.tracer, 'Archiver.retrieveCheckpointsFromRollup', () =>
|
|
977
|
-
retrieveCheckpointsFromRollup(
|
|
978
|
-
this.rollup.getContract() as GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
|
|
979
|
-
this.publicClient,
|
|
980
|
-
this.debugClient,
|
|
981
|
-
this.blobClient,
|
|
982
|
-
searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
|
|
983
|
-
searchEndBlock,
|
|
984
|
-
this.l1Addresses,
|
|
985
|
-
this.instrumentation,
|
|
986
|
-
this.log,
|
|
987
|
-
!this.initialSyncComplete, // isHistoricalSync
|
|
988
|
-
),
|
|
989
|
-
);
|
|
990
|
-
|
|
991
|
-
if (retrievedCheckpoints.length === 0) {
|
|
992
|
-
// We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
|
|
993
|
-
// See further details in earlier comments.
|
|
994
|
-
this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
|
|
995
|
-
continue;
|
|
996
|
-
}
|
|
997
|
-
|
|
998
|
-
this.log.debug(
|
|
999
|
-
`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`,
|
|
1000
|
-
{
|
|
1001
|
-
lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
|
|
1002
|
-
searchStartBlock,
|
|
1003
|
-
searchEndBlock,
|
|
1004
|
-
},
|
|
1005
|
-
);
|
|
1006
|
-
|
|
1007
|
-
const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map(b => retrievedToPublishedCheckpoint(b)));
|
|
1008
|
-
const validCheckpoints: PublishedCheckpoint[] = [];
|
|
1009
|
-
|
|
1010
|
-
for (const published of publishedCheckpoints) {
|
|
1011
|
-
const validationResult = this.config.skipValidateCheckpointAttestations
|
|
1012
|
-
? { valid: true as const }
|
|
1013
|
-
: await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
|
|
1014
|
-
|
|
1015
|
-
// Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
|
|
1016
|
-
// in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
|
|
1017
|
-
// There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
|
|
1018
|
-
// we need to update the validation result, since we need to be able to invalidate the new one.
|
|
1019
|
-
// See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
|
|
1020
|
-
if (
|
|
1021
|
-
rollupStatus.validationResult?.valid !== validationResult.valid ||
|
|
1022
|
-
(!rollupStatus.validationResult.valid &&
|
|
1023
|
-
!validationResult.valid &&
|
|
1024
|
-
rollupStatus.validationResult.checkpoint.checkpointNumber === validationResult.checkpoint.checkpointNumber)
|
|
1025
|
-
) {
|
|
1026
|
-
rollupStatus.validationResult = validationResult;
|
|
1027
|
-
}
|
|
1028
|
-
|
|
1029
|
-
if (!validationResult.valid) {
|
|
1030
|
-
this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
|
|
1031
|
-
checkpointHash: published.checkpoint.hash(),
|
|
1032
|
-
l1BlockNumber: published.l1.blockNumber,
|
|
1033
|
-
...pick(validationResult, 'reason'),
|
|
1034
|
-
});
|
|
1035
|
-
|
|
1036
|
-
// Emit event for invalid checkpoint detection
|
|
1037
|
-
this.emit(L2BlockSourceEvents.InvalidAttestationsCheckpointDetected, {
|
|
1038
|
-
type: L2BlockSourceEvents.InvalidAttestationsCheckpointDetected,
|
|
1039
|
-
validationResult,
|
|
1040
|
-
});
|
|
1041
|
-
|
|
1042
|
-
// We keep consuming checkpoints if we find an invalid one, since we do not listen for CheckpointInvalidated events
|
|
1043
|
-
// We just pretend the invalid ones are not there and keep consuming the next checkpoints
|
|
1044
|
-
// Note that this breaks if the committee ever attests to a descendant of an invalid checkpoint
|
|
1045
|
-
continue;
|
|
1046
|
-
}
|
|
1047
|
-
|
|
1048
|
-
// Check the inHash of the checkpoint against the l1->l2 messages.
|
|
1049
|
-
// The messages should've been synced up to the currentL1BlockNumber and must be available for the published
|
|
1050
|
-
// checkpoints we just retrieved.
|
|
1051
|
-
const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
|
|
1052
|
-
const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
|
|
1053
|
-
const publishedInHash = published.checkpoint.header.inHash;
|
|
1054
|
-
if (!computedInHash.equals(publishedInHash)) {
|
|
1055
|
-
this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
|
|
1056
|
-
checkpointHash: published.checkpoint.hash(),
|
|
1057
|
-
l1BlockNumber: published.l1.blockNumber,
|
|
1058
|
-
computedInHash,
|
|
1059
|
-
publishedInHash,
|
|
1060
|
-
});
|
|
1061
|
-
// Throwing an error since this is most likely caused by a bug.
|
|
1062
|
-
throw new Error(
|
|
1063
|
-
`Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`,
|
|
1064
|
-
);
|
|
1065
|
-
}
|
|
1066
|
-
|
|
1067
|
-
validCheckpoints.push(published);
|
|
1068
|
-
this.log.debug(
|
|
1069
|
-
`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`,
|
|
1070
|
-
{
|
|
1071
|
-
checkpointHash: published.checkpoint.hash(),
|
|
1072
|
-
l1BlockNumber: published.l1.blockNumber,
|
|
1073
|
-
...published.checkpoint.header.toInspect(),
|
|
1074
|
-
blocks: published.checkpoint.blocks.map(b => b.getStats()),
|
|
1075
|
-
},
|
|
1076
|
-
);
|
|
1077
|
-
}
|
|
1078
|
-
|
|
1079
|
-
try {
|
|
1080
|
-
const updatedValidationResult =
|
|
1081
|
-
rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
|
|
1082
|
-
const [processDuration] = await elapsed(() =>
|
|
1083
|
-
execInSpan(this.tracer, 'Archiver.addCheckpoints', () =>
|
|
1084
|
-
this.addCheckpoints(validCheckpoints, updatedValidationResult),
|
|
1085
|
-
),
|
|
1086
|
-
);
|
|
1087
|
-
this.instrumentation.processNewBlocks(
|
|
1088
|
-
processDuration / validCheckpoints.length,
|
|
1089
|
-
validCheckpoints.flatMap(c => c.checkpoint.blocks),
|
|
1090
|
-
);
|
|
1091
|
-
} catch (err) {
|
|
1092
|
-
if (err instanceof InitialCheckpointNumberNotSequentialError) {
|
|
1093
|
-
const { previousCheckpointNumber, newCheckpointNumber } = err;
|
|
1094
|
-
const previousCheckpoint = previousCheckpointNumber
|
|
1095
|
-
? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber))
|
|
1096
|
-
: undefined;
|
|
1097
|
-
const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
|
|
1098
|
-
await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
|
|
1099
|
-
this.log.warn(
|
|
1100
|
-
`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`,
|
|
1101
|
-
{
|
|
1102
|
-
previousCheckpointNumber,
|
|
1103
|
-
newCheckpointNumber,
|
|
1104
|
-
updatedL1SyncPoint,
|
|
1105
|
-
},
|
|
1106
|
-
);
|
|
1107
|
-
}
|
|
1108
|
-
throw err;
|
|
1109
|
-
}
|
|
1110
|
-
|
|
1111
|
-
for (const checkpoint of validCheckpoints) {
|
|
1112
|
-
this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
|
|
1113
|
-
checkpointHash: checkpoint.checkpoint.hash(),
|
|
1114
|
-
checkpointNumber: checkpoint.checkpoint.number,
|
|
1115
|
-
blockCount: checkpoint.checkpoint.blocks.length,
|
|
1116
|
-
txCount: checkpoint.checkpoint.blocks.reduce((acc, b) => acc + b.body.txEffects.length, 0),
|
|
1117
|
-
header: checkpoint.checkpoint.header.toInspect(),
|
|
1118
|
-
archiveRoot: checkpoint.checkpoint.archive.root.toString(),
|
|
1119
|
-
archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex,
|
|
1120
|
-
});
|
|
1121
|
-
}
|
|
1122
|
-
lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
|
|
1123
|
-
lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
|
|
1124
|
-
} while (searchEndBlock < currentL1BlockNumber);
|
|
1125
|
-
|
|
1126
|
-
// Important that we update AFTER inserting the blocks.
|
|
1127
|
-
await updateProvenCheckpoint();
|
|
1128
|
-
|
|
1129
|
-
return { ...rollupStatus, lastRetrievedCheckpoint, lastL1BlockWithCheckpoint };
|
|
1130
|
-
}
|
|
1131
|
-
|
|
1132
|
-
  /**
   * Detects L1 reorgs that inserted checkpoints *behind* our current L1 sync point.
   * Compares the latest checkpoint we hold locally (retrieved this round, or loaded from the store)
   * against the pending checkpoint number reported by the rollup contract (both pinned at
   * `currentL1BlockNumber`). If we are behind, rolls the stored "block synched" L1 block number back
   * so the next archiver iteration re-scans the range where the missed checkpoints must live.
   * @param status - Rollup status for this iteration, including the last checkpoint retrieved this round.
   * @param blocksSynchedTo - L1 block number blocks were synched up to (included in the warn log only).
   * @param currentL1BlockNumber - The L1 block number pinned at the start of the archiver loop.
   */
  private async checkForNewCheckpointsBeforeL1SyncPoint(
    status: RollupStatus,
    blocksSynchedTo: bigint,
    currentL1BlockNumber: bigint,
  ) {
    const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
    // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
    // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
    const latestLocalCheckpointNumber =
      lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber());
    if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
      // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
      // but still haven't reached the pending checkpoint according to the call to the rollup contract.
      // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
      // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
      // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
      let latestLocalCheckpointArchive: string | undefined = undefined;
      // 64 L1 blocks back = the "2 L1 epochs" fallback described above (presumably 2 * 32 slots — confirm).
      let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
      if (lastRetrievedCheckpoint) {
        // Best case: we saw a checkpoint this round, so the reorg must be at or after its L1 block.
        latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
        targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
      } else if (latestLocalCheckpointNumber > 0) {
        // Otherwise fall back to the latest checkpoint persisted in the store, if any.
        const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c]) => c);
        latestLocalCheckpointArchive = checkpoint.archive.root.toString();
        targetL1BlockNumber = checkpoint.l1.blockNumber;
      }
      this.log.warn(
        `Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` +
          `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`,
        {
          latestLocalCheckpointNumber,
          latestLocalCheckpointArchive,
          blocksSynchedTo,
          currentL1BlockNumber,
          ...status,
        },
      );
      await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
    } else {
      this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
        latestLocalCheckpointNumber,
        pendingCheckpointNumber,
      });
    }
  }
|
|
1177
|
-
|
|
1178
|
-
/** Resumes the archiver after a stop. */
|
|
1179
|
-
public resume() {
|
|
1180
|
-
if (this.runningPromise.isRunning()) {
|
|
1181
|
-
this.log.warn(`Archiver already running`);
|
|
1182
|
-
}
|
|
1183
|
-
this.log.info(`Restarting archiver`);
|
|
1184
|
-
this.runningPromise.start();
|
|
1185
|
-
}
|
|
1186
|
-
|
|
1187
|
-
/**
|
|
1188
|
-
* Stops the archiver.
|
|
1189
|
-
* @returns A promise signalling completion of the stop process.
|
|
1190
|
-
*/
|
|
1191
|
-
public async stop(): Promise<void> {
|
|
1192
|
-
this.log.debug('Stopping...');
|
|
1193
|
-
await this.runningPromise.stop();
|
|
1194
|
-
|
|
1195
|
-
this.log.info('Stopped.');
|
|
1196
|
-
return Promise.resolve();
|
|
1197
|
-
}
|
|
1198
|
-
|
|
1199
|
-
public backupTo(destPath: string): Promise<string> {
|
|
1200
|
-
return this.dataStore.backupTo(destPath);
|
|
1201
|
-
}
|
|
1202
|
-
|
|
1203
|
-
public getL1Constants(): Promise<L1RollupConstants> {
|
|
1204
|
-
return Promise.resolve(this.l1constants);
|
|
1205
|
-
}
|
|
1206
|
-
|
|
1207
|
-
public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> {
|
|
1208
|
-
return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot });
|
|
1209
|
-
}
|
|
1210
|
-
|
|
1211
|
-
public getRollupAddress(): Promise<EthAddress> {
|
|
1212
|
-
return Promise.resolve(this.l1Addresses.rollupAddress);
|
|
1213
|
-
}
|
|
1214
|
-
|
|
1215
|
-
public getRegistryAddress(): Promise<EthAddress> {
|
|
1216
|
-
return Promise.resolve(this.l1Addresses.registryAddress);
|
|
1217
|
-
}
|
|
1218
|
-
|
|
1219
|
-
public getL1BlockNumber(): bigint | undefined {
|
|
1220
|
-
return this.l1BlockNumber;
|
|
1221
|
-
}
|
|
1222
|
-
|
|
1223
|
-
public getL1Timestamp(): Promise<bigint | undefined> {
|
|
1224
|
-
return Promise.resolve(this.l1Timestamp);
|
|
1225
|
-
}
|
|
1226
|
-
|
|
1227
|
-
public getL2SlotNumber(): Promise<SlotNumber | undefined> {
|
|
1228
|
-
return Promise.resolve(
|
|
1229
|
-
this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants),
|
|
1230
|
-
);
|
|
1231
|
-
}
|
|
1232
|
-
|
|
1233
|
-
public getL2EpochNumber(): Promise<EpochNumber | undefined> {
|
|
1234
|
-
return Promise.resolve(
|
|
1235
|
-
this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants),
|
|
1236
|
-
);
|
|
1237
|
-
}
|
|
1238
|
-
|
|
1239
|
-
public async getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
|
|
1240
|
-
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1241
|
-
const blocks: L2Block[] = [];
|
|
1242
|
-
|
|
1243
|
-
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1244
|
-
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1245
|
-
let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1246
|
-
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1247
|
-
while (checkpoint && slot(checkpoint) >= start) {
|
|
1248
|
-
if (slot(checkpoint) <= end) {
|
|
1249
|
-
// push the blocks on backwards
|
|
1250
|
-
const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
|
|
1251
|
-
for (let i = endBlock; i >= checkpoint.startBlock; i--) {
|
|
1252
|
-
const block = await this.getBlock(BlockNumber(i));
|
|
1253
|
-
if (block) {
|
|
1254
|
-
blocks.push(block);
|
|
1255
|
-
}
|
|
1256
|
-
}
|
|
1257
|
-
}
|
|
1258
|
-
checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
|
|
1259
|
-
}
|
|
1260
|
-
|
|
1261
|
-
return blocks.reverse();
|
|
1262
|
-
}
|
|
1263
|
-
|
|
1264
|
-
public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
|
|
1265
|
-
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1266
|
-
const blocks: BlockHeader[] = [];
|
|
1267
|
-
|
|
1268
|
-
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1269
|
-
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1270
|
-
let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1271
|
-
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1272
|
-
while (checkpoint && slot(checkpoint) >= start) {
|
|
1273
|
-
if (slot(checkpoint) <= end) {
|
|
1274
|
-
// push the blocks on backwards
|
|
1275
|
-
const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
|
|
1276
|
-
for (let i = endBlock; i >= checkpoint.startBlock; i--) {
|
|
1277
|
-
const block = await this.getBlockHeader(BlockNumber(i));
|
|
1278
|
-
if (block) {
|
|
1279
|
-
blocks.push(block);
|
|
1280
|
-
}
|
|
1281
|
-
}
|
|
1282
|
-
}
|
|
1283
|
-
checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
|
|
1284
|
-
}
|
|
1285
|
-
return blocks.reverse();
|
|
1286
|
-
}
|
|
1287
|
-
|
|
1288
|
-
public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
|
|
1289
|
-
// The epoch is complete if the current L2 block is the last one in the epoch (or later)
|
|
1290
|
-
const header = await this.getBlockHeader('latest');
|
|
1291
|
-
const slot = header ? header.globalVariables.slotNumber : undefined;
|
|
1292
|
-
const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1293
|
-
if (slot && slot >= endSlot) {
|
|
1294
|
-
return true;
|
|
1295
|
-
}
|
|
1296
|
-
|
|
1297
|
-
// If we haven't run an initial sync, just return false.
|
|
1298
|
-
const l1Timestamp = this.l1Timestamp;
|
|
1299
|
-
if (l1Timestamp === undefined) {
|
|
1300
|
-
return false;
|
|
1301
|
-
}
|
|
1302
|
-
|
|
1303
|
-
// If not, the epoch may also be complete if the L2 slot has passed without a block
|
|
1304
|
-
// We compute this based on the end timestamp for the given epoch and the timestamp of the last L1 block
|
|
1305
|
-
const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants);
|
|
1306
|
-
|
|
1307
|
-
// For this computation, we throw in a few extra seconds just for good measure,
|
|
1308
|
-
// since we know the next L1 block won't be mined within this range. Remember that
|
|
1309
|
-
// l1timestamp is the timestamp of the last l1 block we've seen, so this relies on
|
|
1310
|
-
// the fact that L1 won't mine two blocks within this time of each other.
|
|
1311
|
-
// TODO(palla/reorg): Is the above a safe assumption?
|
|
1312
|
-
const leeway = 1n;
|
|
1313
|
-
return l1Timestamp + leeway >= endTimestamp;
|
|
1314
|
-
}
|
|
1315
|
-
|
|
1316
|
-
/** Returns whether the archiver has completed an initial sync run successfully. */
|
|
1317
|
-
public isInitialSyncComplete(): boolean {
|
|
1318
|
-
return this.initialSyncComplete;
|
|
1319
|
-
}
|
|
1320
|
-
|
|
1321
|
-
public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise<CheckpointHeader | undefined> {
|
|
1322
|
-
if (number === 'latest') {
|
|
1323
|
-
number = await this.getSynchedCheckpointNumber();
|
|
1324
|
-
}
|
|
1325
|
-
if (number === 0) {
|
|
1326
|
-
return undefined;
|
|
1327
|
-
}
|
|
1328
|
-
const checkpoint = await this.store.getCheckpointData(number);
|
|
1329
|
-
if (!checkpoint) {
|
|
1330
|
-
return undefined;
|
|
1331
|
-
}
|
|
1332
|
-
return checkpoint.header;
|
|
1333
|
-
}
|
|
1334
|
-
|
|
1335
|
-
public getCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1336
|
-
return this.getSynchedCheckpointNumber();
|
|
1337
|
-
}
|
|
1338
|
-
|
|
1339
|
-
public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1340
|
-
return this.store.getSynchedCheckpointNumber();
|
|
1341
|
-
}
|
|
1342
|
-
|
|
1343
|
-
public getProvenCheckpointNumber(): Promise<CheckpointNumber> {
|
|
1344
|
-
return this.store.getProvenCheckpointNumber();
|
|
1345
|
-
}
|
|
1346
|
-
|
|
1347
|
-
public setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
|
|
1348
|
-
return this.store.setProvenCheckpointNumber(checkpointNumber);
|
|
1349
|
-
}
|
|
1350
|
-
|
|
1351
|
-
public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
|
|
1352
|
-
return this.store.unwindCheckpoints(from, checkpointsToUnwind);
|
|
1353
|
-
}
|
|
1354
|
-
|
|
1355
|
-
public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise<BlockNumber | undefined> {
|
|
1356
|
-
const checkpointData = await this.store.getCheckpointData(checkpointNumber);
|
|
1357
|
-
if (!checkpointData) {
|
|
1358
|
-
return undefined;
|
|
1359
|
-
}
|
|
1360
|
-
return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
|
|
1361
|
-
}
|
|
1362
|
-
|
|
1363
|
-
public addCheckpoints(
|
|
1364
|
-
checkpoints: PublishedCheckpoint[],
|
|
1365
|
-
pendingChainValidationStatus?: ValidateCheckpointResult,
|
|
1366
|
-
): Promise<boolean> {
|
|
1367
|
-
return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
|
|
1368
|
-
}
|
|
1369
|
-
|
|
1370
|
-
public getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
|
|
1371
|
-
return this.store.getBlockHeaderByHash(blockHash);
|
|
1372
|
-
}
|
|
1373
|
-
|
|
1374
|
-
public getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
|
|
1375
|
-
return this.store.getBlockHeaderByArchive(archive);
|
|
1376
|
-
}
|
|
1377
|
-
|
|
1378
|
-
/**
|
|
1379
|
-
* Gets an l2 block.
|
|
1380
|
-
* @param number - The block number to return.
|
|
1381
|
-
* @returns The requested L2 block.
|
|
1382
|
-
*/
|
|
1383
|
-
public async getL2BlockNew(number: BlockNumber): Promise<L2BlockNew | undefined> {
|
|
1384
|
-
// If the number provided is -ve, then return the latest block.
|
|
1385
|
-
if (number < 0) {
|
|
1386
|
-
number = await this.store.getSynchedL2BlockNumber();
|
|
1387
|
-
}
|
|
1388
|
-
if (number === 0) {
|
|
1389
|
-
return undefined;
|
|
1390
|
-
}
|
|
1391
|
-
const publishedBlock = await this.store.store.getBlock(number);
|
|
1392
|
-
return publishedBlock;
|
|
1393
|
-
}
|
|
1394
|
-
|
|
1395
|
-
public async getL2BlocksNew(from: BlockNumber, limit: number, proven?: boolean): Promise<L2BlockNew[]> {
|
|
1396
|
-
const blocks = await this.store.store.getBlocks(from, limit);
|
|
1397
|
-
|
|
1398
|
-
if (proven === true) {
|
|
1399
|
-
const provenBlockNumber = await this.store.getProvenBlockNumber();
|
|
1400
|
-
return blocks.filter(b => b.number <= provenBlockNumber);
|
|
1401
|
-
}
|
|
1402
|
-
return blocks;
|
|
1403
|
-
}
|
|
1404
|
-
|
|
1405
|
-
public async getBlockHeader(number: BlockNumber | 'latest'): Promise<BlockHeader | undefined> {
|
|
1406
|
-
if (number === 'latest') {
|
|
1407
|
-
number = await this.store.getSynchedL2BlockNumber();
|
|
1408
|
-
}
|
|
1409
|
-
if (number === 0) {
|
|
1410
|
-
return undefined;
|
|
1411
|
-
}
|
|
1412
|
-
const headers = await this.store.getBlockHeaders(number, 1);
|
|
1413
|
-
return headers.length === 0 ? undefined : headers[0];
|
|
1414
|
-
}
|
|
1415
|
-
|
|
1416
|
-
getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
|
|
1417
|
-
return this.store.getCheckpointedBlock(number);
|
|
1418
|
-
}
|
|
1419
|
-
|
|
1420
|
-
public async getCheckpointedBlocks(
|
|
1421
|
-
from: BlockNumber,
|
|
1422
|
-
limit: number,
|
|
1423
|
-
proven?: boolean,
|
|
1424
|
-
): Promise<CheckpointedL2Block[]> {
|
|
1425
|
-
const blocks = await this.store.store.getCheckpointedBlocks(from, limit);
|
|
1426
|
-
|
|
1427
|
-
if (proven === true) {
|
|
1428
|
-
const provenBlockNumber = await this.store.getProvenBlockNumber();
|
|
1429
|
-
return blocks.filter(b => b.block.number <= provenBlockNumber);
|
|
1430
|
-
}
|
|
1431
|
-
return blocks;
|
|
1432
|
-
}
|
|
1433
|
-
|
|
1434
|
-
getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
1435
|
-
return this.store.getCheckpointedBlockByHash(blockHash);
|
|
1436
|
-
}
|
|
1437
|
-
|
|
1438
|
-
getProvenBlockNumber(): Promise<BlockNumber> {
|
|
1439
|
-
return this.store.getProvenBlockNumber();
|
|
1440
|
-
}
|
|
1441
|
-
getCheckpointedBlockNumber(): Promise<BlockNumber> {
|
|
1442
|
-
return this.store.getCheckpointedL2BlockNumber();
|
|
1443
|
-
}
|
|
1444
|
-
getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
1445
|
-
return this.store.getCheckpointedBlockByArchive(archive);
|
|
1446
|
-
}
|
|
1447
|
-
|
|
1448
|
-
public getTxEffect(txHash: TxHash) {
|
|
1449
|
-
return this.store.getTxEffect(txHash);
|
|
1450
|
-
}
|
|
1451
|
-
|
|
1452
|
-
public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
|
|
1453
|
-
return this.store.getSettledTxReceipt(txHash);
|
|
1454
|
-
}
|
|
1455
|
-
|
|
1456
|
-
getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
1457
|
-
return this.store.getPrivateLogsByTags(tags);
|
|
1458
|
-
}
|
|
1459
|
-
|
|
1460
|
-
getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
|
|
1461
|
-
return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
1462
|
-
}
|
|
1463
|
-
|
|
1464
|
-
/**
|
|
1465
|
-
* Gets public logs based on the provided filter.
|
|
1466
|
-
* @param filter - The filter to apply to the logs.
|
|
1467
|
-
* @returns The requested logs.
|
|
1468
|
-
*/
|
|
1469
|
-
getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
|
|
1470
|
-
return this.store.getPublicLogs(filter);
|
|
1471
|
-
}
|
|
1472
|
-
|
|
1473
|
-
/**
|
|
1474
|
-
* Gets contract class logs based on the provided filter.
|
|
1475
|
-
* @param filter - The filter to apply to the logs.
|
|
1476
|
-
* @returns The requested logs.
|
|
1477
|
-
*/
|
|
1478
|
-
getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
|
|
1479
|
-
return this.store.getContractClassLogs(filter);
|
|
1480
|
-
}
|
|
1481
|
-
|
|
1482
|
-
/**
|
|
1483
|
-
* Gets the number of the latest L2 block processed by the block source implementation.
|
|
1484
|
-
* This includes both checkpointed and uncheckpointed blocks.
|
|
1485
|
-
* @returns The number of the latest L2 block processed by the block source implementation.
|
|
1486
|
-
*/
|
|
1487
|
-
public getBlockNumber(): Promise<BlockNumber> {
|
|
1488
|
-
return this.store.getLatestBlockNumber();
|
|
1489
|
-
}
|
|
1490
|
-
|
|
1491
|
-
public getContractClass(id: Fr): Promise<ContractClassPublic | undefined> {
|
|
1492
|
-
return this.store.getContractClass(id);
|
|
1493
|
-
}
|
|
1494
|
-
|
|
1495
|
-
public getBytecodeCommitment(id: Fr): Promise<Fr | undefined> {
|
|
1496
|
-
return this.store.getBytecodeCommitment(id);
|
|
1497
|
-
}
|
|
1498
|
-
|
|
1499
|
-
public async getContract(
|
|
1500
|
-
address: AztecAddress,
|
|
1501
|
-
maybeTimestamp?: UInt64,
|
|
1502
|
-
): Promise<ContractInstanceWithAddress | undefined> {
|
|
1503
|
-
let timestamp;
|
|
1504
|
-
if (maybeTimestamp === undefined) {
|
|
1505
|
-
const latestBlockHeader = await this.getBlockHeader('latest');
|
|
1506
|
-
// If we get undefined block header, it means that the archiver has not yet synced any block so we default to 0.
|
|
1507
|
-
timestamp = latestBlockHeader ? latestBlockHeader.globalVariables.timestamp : 0n;
|
|
1508
|
-
} else {
|
|
1509
|
-
timestamp = maybeTimestamp;
|
|
1510
|
-
}
|
|
1511
|
-
|
|
1512
|
-
return this.store.getContractInstance(address, timestamp);
|
|
1513
|
-
}
|
|
1514
|
-
|
|
1515
|
-
/**
|
|
1516
|
-
* Gets L1 to L2 message (to be) included in a given checkpoint.
|
|
1517
|
-
* @param checkpointNumber - Checkpoint number to get messages for.
|
|
1518
|
-
* @returns The L1 to L2 messages/leaves of the messages subtree (throws if not found).
|
|
1519
|
-
*/
|
|
1520
|
-
getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
|
|
1521
|
-
return this.store.getL1ToL2Messages(checkpointNumber);
|
|
1522
|
-
}
|
|
1523
|
-
|
|
1524
|
-
/**
|
|
1525
|
-
* Gets the L1 to L2 message index in the L1 to L2 message tree.
|
|
1526
|
-
* @param l1ToL2Message - The L1 to L2 message.
|
|
1527
|
-
* @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
|
|
1528
|
-
*/
|
|
1529
|
-
getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
|
|
1530
|
-
return this.store.getL1ToL2MessageIndex(l1ToL2Message);
|
|
1531
|
-
}
|
|
1532
|
-
|
|
1533
|
-
getContractClassIds(): Promise<Fr[]> {
|
|
1534
|
-
return this.store.getContractClassIds();
|
|
1535
|
-
}
|
|
1536
|
-
|
|
1537
|
-
registerContractFunctionSignatures(signatures: string[]): Promise<void> {
|
|
1538
|
-
return this.store.registerContractFunctionSignatures(signatures);
|
|
1539
|
-
}
|
|
1540
|
-
|
|
1541
|
-
getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
|
|
1542
|
-
return this.store.getDebugFunctionName(address, selector);
|
|
1543
|
-
}
|
|
1544
|
-
|
|
1545
|
-
async getPendingChainValidationStatus(): Promise<ValidateCheckpointResult> {
|
|
1546
|
-
return (await this.store.getPendingChainValidationStatus()) ?? { valid: true };
|
|
1547
|
-
}
|
|
1548
|
-
|
|
1549
|
-
isPendingChainInvalid(): Promise<boolean> {
|
|
1550
|
-
return this.getPendingChainValidationStatus().then(status => !status.valid);
|
|
1551
|
-
}
|
|
1552
|
-
|
|
1553
|
-
async getL2Tips(): Promise<L2Tips> {
|
|
1554
|
-
const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([
|
|
1555
|
-
this.getBlockNumber(),
|
|
1556
|
-
this.getProvenBlockNumber(),
|
|
1557
|
-
this.getCheckpointedBlockNumber(),
|
|
1558
|
-
] as const);
|
|
1559
|
-
|
|
1560
|
-
// TODO(#13569): Compute proper finalized block number based on L1 finalized block.
|
|
1561
|
-
// We just force it 2 epochs worth of proven data for now.
|
|
1562
|
-
// NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
|
|
1563
|
-
const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
|
|
1564
|
-
|
|
1565
|
-
const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
|
|
1566
|
-
|
|
1567
|
-
// Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
|
|
1568
|
-
const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
|
|
1569
|
-
await Promise.all([
|
|
1570
|
-
latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
|
|
1571
|
-
provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
|
|
1572
|
-
finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
|
|
1573
|
-
checkpointedBlockNumber > beforeInitialblockNumber
|
|
1574
|
-
? this.getCheckpointedBlock(checkpointedBlockNumber)
|
|
1575
|
-
: undefined,
|
|
1576
|
-
] as const);
|
|
1577
|
-
|
|
1578
|
-
if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
|
|
1579
|
-
throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
|
|
1580
|
-
}
|
|
1581
|
-
|
|
1582
|
-
// Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
|
|
1583
|
-
if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
|
|
1584
|
-
throw new Error(
|
|
1585
|
-
`Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
|
|
1586
|
-
);
|
|
1587
|
-
}
|
|
1588
|
-
|
|
1589
|
-
if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
|
|
1590
|
-
throw new Error(
|
|
1591
|
-
`Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
|
|
1592
|
-
);
|
|
1593
|
-
}
|
|
1594
|
-
|
|
1595
|
-
if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
|
|
1596
|
-
throw new Error(
|
|
1597
|
-
`Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
|
|
1598
|
-
);
|
|
1599
|
-
}
|
|
1600
|
-
|
|
1601
|
-
const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1602
|
-
const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1603
|
-
const finalizedBlockHeaderHash =
|
|
1604
|
-
(await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1605
|
-
const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
|
|
1606
|
-
|
|
1607
|
-
// Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
|
|
1608
|
-
const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
|
|
1609
|
-
provenCheckpointedBlock !== undefined
|
|
1610
|
-
? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
|
|
1611
|
-
: [undefined],
|
|
1612
|
-
finalizedCheckpointedBlock !== undefined
|
|
1613
|
-
? await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
|
|
1614
|
-
: [undefined],
|
|
1615
|
-
checkpointedBlock !== undefined
|
|
1616
|
-
? await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1)
|
|
1617
|
-
: [undefined],
|
|
1618
|
-
]);
|
|
1619
|
-
|
|
1620
|
-
const initialcheckpointId: CheckpointId = {
|
|
1621
|
-
number: CheckpointNumber.ZERO,
|
|
1622
|
-
hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
|
|
1623
|
-
};
|
|
1624
|
-
|
|
1625
|
-
const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
|
|
1626
|
-
if (checkpoint === undefined) {
|
|
1627
|
-
return initialcheckpointId;
|
|
1628
|
-
}
|
|
1629
|
-
return {
|
|
1630
|
-
number: checkpoint.checkpoint.number,
|
|
1631
|
-
hash: checkpoint.checkpoint.hash().toString(),
|
|
1632
|
-
};
|
|
1633
|
-
};
|
|
1634
|
-
|
|
1635
|
-
const l2Tips: L2Tips = {
|
|
1636
|
-
proposed: {
|
|
1637
|
-
number: latestBlockNumber,
|
|
1638
|
-
hash: latestBlockHeaderHash.toString(),
|
|
1639
|
-
},
|
|
1640
|
-
proven: {
|
|
1641
|
-
block: {
|
|
1642
|
-
number: provenBlockNumber,
|
|
1643
|
-
hash: provenBlockHeaderHash.toString(),
|
|
1644
|
-
},
|
|
1645
|
-
checkpoint: makeCheckpointId(provenBlockCheckpoint),
|
|
1646
|
-
},
|
|
1647
|
-
finalized: {
|
|
1648
|
-
block: {
|
|
1649
|
-
number: finalizedBlockNumber,
|
|
1650
|
-
hash: finalizedBlockHeaderHash.toString(),
|
|
1651
|
-
},
|
|
1652
|
-
checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
|
|
1653
|
-
},
|
|
1654
|
-
checkpointed: {
|
|
1655
|
-
block: {
|
|
1656
|
-
number: checkpointedBlockNumber,
|
|
1657
|
-
hash: checkpointedBlockHeaderHash.toString(),
|
|
1658
|
-
},
|
|
1659
|
-
checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
|
|
1660
|
-
},
|
|
1661
|
-
};
|
|
1662
|
-
|
|
1663
|
-
return l2Tips;
|
|
1664
|
-
}
|
|
1665
|
-
|
|
1666
|
-
public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
|
|
1667
|
-
// TODO(pw/mbps): This still assumes 1 block per checkpoint
|
|
1668
|
-
const currentBlocks = await this.getL2Tips();
|
|
1669
|
-
const currentL2Block = currentBlocks.proposed.number;
|
|
1670
|
-
const currentProvenBlock = currentBlocks.proven.block.number;
|
|
1671
|
-
|
|
1672
|
-
if (targetL2BlockNumber >= currentL2Block) {
|
|
1673
|
-
throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
|
|
1674
|
-
}
|
|
1675
|
-
const blocksToUnwind = currentL2Block - targetL2BlockNumber;
|
|
1676
|
-
const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
|
|
1677
|
-
if (!targetL2Block) {
|
|
1678
|
-
throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
|
|
1679
|
-
}
|
|
1680
|
-
const targetL1BlockNumber = targetL2Block.l1.blockNumber;
|
|
1681
|
-
const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
|
|
1682
|
-
const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
|
|
1683
|
-
this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
|
|
1684
|
-
await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
|
|
1685
|
-
this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
|
|
1686
|
-
await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
|
|
1687
|
-
this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
|
|
1688
|
-
await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
|
|
1689
|
-
await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
|
|
1690
|
-
if (targetL2BlockNumber < currentProvenBlock) {
|
|
1691
|
-
this.log.info(`Clearing proven L2 block number`);
|
|
1692
|
-
await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
|
|
1693
|
-
}
|
|
1694
|
-
// TODO(palla/reorg): Set the finalized block when we add support for it.
|
|
1695
|
-
// if (targetL2BlockNumber < currentFinalizedBlock) {
|
|
1696
|
-
// this.log.info(`Clearing finalized L2 block number`);
|
|
1697
|
-
// await this.store.setFinalizedL2BlockNumber(0);
|
|
1698
|
-
// }
|
|
1699
|
-
}
|
|
1700
|
-
|
|
1701
|
-
public async getPublishedCheckpoints(
|
|
1702
|
-
checkpointNumber: CheckpointNumber,
|
|
1703
|
-
limit: number,
|
|
1704
|
-
): Promise<PublishedCheckpoint[]> {
|
|
1705
|
-
const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
|
|
1706
|
-
const blocks = (
|
|
1707
|
-
await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
|
|
1708
|
-
).filter(isDefined);
|
|
1709
|
-
|
|
1710
|
-
const fullCheckpoints: PublishedCheckpoint[] = [];
|
|
1711
|
-
for (let i = 0; i < checkpoints.length; i++) {
|
|
1712
|
-
const blocksForCheckpoint = blocks[i];
|
|
1713
|
-
const checkpoint = checkpoints[i];
|
|
1714
|
-
const fullCheckpoint = new Checkpoint(
|
|
1715
|
-
checkpoint.archive,
|
|
1716
|
-
checkpoint.header,
|
|
1717
|
-
blocksForCheckpoint,
|
|
1718
|
-
checkpoint.checkpointNumber,
|
|
1719
|
-
);
|
|
1720
|
-
const publishedCheckpoint = new PublishedCheckpoint(
|
|
1721
|
-
fullCheckpoint,
|
|
1722
|
-
checkpoint.l1,
|
|
1723
|
-
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1724
|
-
);
|
|
1725
|
-
fullCheckpoints.push(publishedCheckpoint);
|
|
1726
|
-
}
|
|
1727
|
-
return fullCheckpoints;
|
|
1728
|
-
}
|
|
1729
|
-
|
|
1730
|
-
public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise<Checkpoint[]> {
|
|
1731
|
-
const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
|
|
1732
|
-
const checkpoints: Checkpoint[] = [];
|
|
1733
|
-
|
|
1734
|
-
// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
|
|
1735
|
-
// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
|
|
1736
|
-
let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
|
|
1737
|
-
const slot = (b: CheckpointData) => b.header.slotNumber;
|
|
1738
|
-
while (checkpointData && slot(checkpointData) >= start) {
|
|
1739
|
-
if (slot(checkpointData) <= end) {
|
|
1740
|
-
// push the checkpoints on backwards
|
|
1741
|
-
const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
|
|
1742
|
-
checkpoints.push(checkpoint.checkpoint);
|
|
1743
|
-
}
|
|
1744
|
-
checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
|
|
1745
|
-
}
|
|
1746
|
-
|
|
1747
|
-
return checkpoints.reverse();
|
|
1748
|
-
}
|
|
1749
|
-
|
|
1750
|
-
/* Legacy APIs */
|
|
1751
|
-
|
|
1752
|
-
public async getPublishedBlockByHash(blockHash: Fr): Promise<PublishedL2Block | undefined> {
|
|
1753
|
-
const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
|
|
1754
|
-
return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
|
|
1755
|
-
}
|
|
1756
|
-
public async getPublishedBlockByArchive(archive: Fr): Promise<PublishedL2Block | undefined> {
|
|
1757
|
-
const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
|
|
1758
|
-
return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
|
|
1759
|
-
}
|
|
1760
|
-
|
|
1761
|
-
/**
|
|
1762
|
-
* Gets up to `limit` amount of L2 blocks starting from `from`.
|
|
1763
|
-
* @param from - Number of the first block to return (inclusive).
|
|
1764
|
-
* @param limit - The number of blocks to return.
|
|
1765
|
-
* @param proven - If true, only return blocks that have been proven.
|
|
1766
|
-
* @returns The requested L2 blocks.
|
|
1767
|
-
*/
|
|
1768
|
-
public async getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<L2Block[]> {
|
|
1769
|
-
const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
|
|
1770
|
-
return publishedBlocks.map(x => x.block);
|
|
1771
|
-
}
|
|
1772
|
-
|
|
1773
|
-
public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<PublishedL2Block[]> {
|
|
1774
|
-
const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
|
|
1775
|
-
const provenCheckpointNumber = await this.getProvenCheckpointNumber();
|
|
1776
|
-
const blocks = (
|
|
1777
|
-
await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
|
|
1778
|
-
).filter(isDefined);
|
|
1779
|
-
|
|
1780
|
-
const olbBlocks: PublishedL2Block[] = [];
|
|
1781
|
-
for (let i = 0; i < checkpoints.length; i++) {
|
|
1782
|
-
const blockForCheckpoint = blocks[i][0];
|
|
1783
|
-
const checkpoint = checkpoints[i];
|
|
1784
|
-
if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
|
|
1785
|
-
// this checkpointisn't proven and we only want proven
|
|
1786
|
-
continue;
|
|
1787
|
-
}
|
|
1788
|
-
const oldCheckpoint = new Checkpoint(
|
|
1789
|
-
blockForCheckpoint.archive,
|
|
1790
|
-
checkpoint.header,
|
|
1791
|
-
[blockForCheckpoint],
|
|
1792
|
-
checkpoint.checkpointNumber,
|
|
1793
|
-
);
|
|
1794
|
-
const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
|
|
1795
|
-
const publishedBlock = new PublishedL2Block(
|
|
1796
|
-
oldBlock,
|
|
1797
|
-
checkpoint.l1,
|
|
1798
|
-
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1799
|
-
);
|
|
1800
|
-
olbBlocks.push(publishedBlock);
|
|
1801
|
-
}
|
|
1802
|
-
return olbBlocks;
|
|
1803
|
-
}
|
|
1804
|
-
|
|
1805
|
-
private async buildOldBlockFromCheckpointedBlock(
|
|
1806
|
-
checkpointedBlock: CheckpointedL2Block | undefined,
|
|
1807
|
-
): Promise<PublishedL2Block | undefined> {
|
|
1808
|
-
if (!checkpointedBlock) {
|
|
1809
|
-
return undefined;
|
|
1810
|
-
}
|
|
1811
|
-
const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
|
|
1812
|
-
if (!checkpoint) {
|
|
1813
|
-
return checkpoint;
|
|
1814
|
-
}
|
|
1815
|
-
const fullCheckpoint = new Checkpoint(
|
|
1816
|
-
checkpointedBlock?.block.archive,
|
|
1817
|
-
checkpoint?.header,
|
|
1818
|
-
[checkpointedBlock.block],
|
|
1819
|
-
checkpoint.checkpointNumber,
|
|
1820
|
-
);
|
|
1821
|
-
const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
|
|
1822
|
-
const published = new PublishedL2Block(
|
|
1823
|
-
oldBlock,
|
|
1824
|
-
checkpoint.l1,
|
|
1825
|
-
checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
|
|
1826
|
-
);
|
|
1827
|
-
return published;
|
|
1828
|
-
}
|
|
1829
|
-
|
|
1830
|
-
public async getBlock(number: BlockNumber): Promise<L2Block | undefined> {
|
|
1831
|
-
// If the number provided is -ve, then return the latest block.
|
|
1832
|
-
if (number < 0) {
|
|
1833
|
-
number = await this.store.getSynchedL2BlockNumber();
|
|
1834
|
-
}
|
|
1835
|
-
if (number === 0) {
|
|
1836
|
-
return undefined;
|
|
1837
|
-
}
|
|
1838
|
-
const publishedBlocks = await this.getPublishedBlocks(number, 1);
|
|
1839
|
-
if (publishedBlocks.length === 0) {
|
|
1840
|
-
return undefined;
|
|
1841
|
-
}
|
|
1842
|
-
return publishedBlocks[0].block;
|
|
1843
|
-
}
|
|
1844
|
-
}
|
|
1845
|
-
|
|
1846
|
-
enum Operation {
|
|
1847
|
-
Store,
|
|
1848
|
-
Delete,
|
|
1849
|
-
}
|
|
1850
|
-
|
|
1851
|
-
/**
|
|
1852
|
-
* A helper class that we use to deal with some of the logic needed when adding blocks.
|
|
1853
|
-
*
|
|
1854
|
-
* I would have preferred to not have this type. But it is useful for handling the logic that any
|
|
1855
|
-
* store would need to include otherwise while exposing fewer functions and logic directly to the archiver.
|
|
1856
|
-
*/
|
|
1857
|
-
export class ArchiverStoreHelper
|
|
1858
|
-
implements
|
|
1859
|
-
Omit<
|
|
1860
|
-
ArchiverDataStore,
|
|
1861
|
-
| 'addLogs'
|
|
1862
|
-
| 'deleteLogs'
|
|
1863
|
-
| 'addContractClasses'
|
|
1864
|
-
| 'deleteContractClasses'
|
|
1865
|
-
| 'addContractInstances'
|
|
1866
|
-
| 'deleteContractInstances'
|
|
1867
|
-
| 'addContractInstanceUpdates'
|
|
1868
|
-
| 'deleteContractInstanceUpdates'
|
|
1869
|
-
| 'addFunctions'
|
|
1870
|
-
| 'backupTo'
|
|
1871
|
-
| 'close'
|
|
1872
|
-
| 'transactionAsync'
|
|
1873
|
-
| 'addBlocks'
|
|
1874
|
-
| 'getBlock'
|
|
1875
|
-
| 'getBlocks'
|
|
1876
|
-
| 'getCheckpointedBlocks'
|
|
1877
|
-
>
|
|
1878
|
-
{
|
|
1879
|
-
#log = createLogger('archiver:block-helper');
|
|
1880
|
-
|
|
1881
|
-
constructor(public readonly store: ArchiverDataStore) {}
|
|
1882
|
-
|
|
1883
|
-
/**
|
|
1884
|
-
* Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract.
|
|
1885
|
-
* @param allLogs - All logs emitted in a bunch of blocks.
|
|
1886
|
-
*/
|
|
1887
|
-
async #updatePublishedContractClasses(allLogs: ContractClassLog[], blockNum: BlockNumber, operation: Operation) {
|
|
1888
|
-
const contractClassPublishedEvents = allLogs
|
|
1889
|
-
.filter(log => ContractClassPublishedEvent.isContractClassPublishedEvent(log))
|
|
1890
|
-
.map(log => ContractClassPublishedEvent.fromLog(log));
|
|
1891
|
-
|
|
1892
|
-
const contractClasses = await Promise.all(contractClassPublishedEvents.map(e => e.toContractClassPublic()));
|
|
1893
|
-
if (contractClasses.length > 0) {
|
|
1894
|
-
contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`));
|
|
1895
|
-
if (operation == Operation.Store) {
|
|
1896
|
-
// TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive
|
|
1897
|
-
const commitments = await Promise.all(
|
|
1898
|
-
contractClasses.map(c => computePublicBytecodeCommitment(c.packedBytecode)),
|
|
1899
|
-
);
|
|
1900
|
-
return await this.store.addContractClasses(contractClasses, commitments, blockNum);
|
|
1901
|
-
} else if (operation == Operation.Delete) {
|
|
1902
|
-
return await this.store.deleteContractClasses(contractClasses, blockNum);
|
|
1903
|
-
}
|
|
1904
|
-
}
|
|
1905
|
-
return true;
|
|
1906
|
-
}
|
|
1907
|
-
|
|
1908
|
-
/**
|
|
1909
|
-
* Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
|
|
1910
|
-
* @param allLogs - All logs emitted in a bunch of blocks.
|
|
1911
|
-
*/
|
|
1912
|
-
async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum: BlockNumber, operation: Operation) {
|
|
1913
|
-
const contractInstances = allLogs
|
|
1914
|
-
.filter(log => ContractInstancePublishedEvent.isContractInstancePublishedEvent(log))
|
|
1915
|
-
.map(log => ContractInstancePublishedEvent.fromLog(log))
|
|
1916
|
-
.map(e => e.toContractInstance());
|
|
1917
|
-
if (contractInstances.length > 0) {
|
|
1918
|
-
contractInstances.forEach(c =>
|
|
1919
|
-
this.#log.verbose(`${Operation[operation]} contract instance at ${c.address.toString()}`),
|
|
1920
|
-
);
|
|
1921
|
-
if (operation == Operation.Store) {
|
|
1922
|
-
return await this.store.addContractInstances(contractInstances, blockNum);
|
|
1923
|
-
} else if (operation == Operation.Delete) {
|
|
1924
|
-
return await this.store.deleteContractInstances(contractInstances, blockNum);
|
|
1925
|
-
}
|
|
1926
|
-
}
|
|
1927
|
-
return true;
|
|
1928
|
-
}
|
|
1929
|
-
|
|
1930
|
-
/**
|
|
1931
|
-
* Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
|
|
1932
|
-
* @param allLogs - All logs emitted in a bunch of blocks.
|
|
1933
|
-
* @param timestamp - Timestamp at which the updates were scheduled.
|
|
1934
|
-
* @param operation - The operation to perform on the contract instance updates (Store or Delete).
|
|
1935
|
-
*/
|
|
1936
|
-
async #updateUpdatedContractInstances(allLogs: PublicLog[], timestamp: UInt64, operation: Operation) {
|
|
1937
|
-
const contractUpdates = allLogs
|
|
1938
|
-
.filter(log => ContractInstanceUpdatedEvent.isContractInstanceUpdatedEvent(log))
|
|
1939
|
-
.map(log => ContractInstanceUpdatedEvent.fromLog(log))
|
|
1940
|
-
.map(e => e.toContractInstanceUpdate());
|
|
1941
|
-
|
|
1942
|
-
if (contractUpdates.length > 0) {
|
|
1943
|
-
contractUpdates.forEach(c =>
|
|
1944
|
-
this.#log.verbose(`${Operation[operation]} contract instance update at ${c.address.toString()}`),
|
|
1945
|
-
);
|
|
1946
|
-
if (operation == Operation.Store) {
|
|
1947
|
-
return await this.store.addContractInstanceUpdates(contractUpdates, timestamp);
|
|
1948
|
-
} else if (operation == Operation.Delete) {
|
|
1949
|
-
return await this.store.deleteContractInstanceUpdates(contractUpdates, timestamp);
|
|
1950
|
-
}
|
|
1951
|
-
}
|
|
1952
|
-
return true;
|
|
1953
|
-
}
|
|
1954
|
-
|
|
1955
|
-
/**
|
|
1956
|
-
* Stores the functions that were broadcasted individually
|
|
1957
|
-
*
|
|
1958
|
-
* @dev Beware that there is not a delete variant of this, since they are added to contract classes
|
|
1959
|
-
* and will be deleted as part of the class if needed.
|
|
1960
|
-
*
|
|
1961
|
-
* @param allLogs - The logs from the block
|
|
1962
|
-
* @param _blockNum - The block number
|
|
1963
|
-
* @returns
|
|
1964
|
-
*/
|
|
1965
|
-
async #storeBroadcastedIndividualFunctions(allLogs: ContractClassLog[], _blockNum: BlockNumber) {
|
|
1966
|
-
// Filter out private and utility function broadcast events
|
|
1967
|
-
const privateFnEvents = allLogs
|
|
1968
|
-
.filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log))
|
|
1969
|
-
.map(log => PrivateFunctionBroadcastedEvent.fromLog(log));
|
|
1970
|
-
const utilityFnEvents = allLogs
|
|
1971
|
-
.filter(log => UtilityFunctionBroadcastedEvent.isUtilityFunctionBroadcastedEvent(log))
|
|
1972
|
-
.map(log => UtilityFunctionBroadcastedEvent.fromLog(log));
|
|
1973
|
-
|
|
1974
|
-
// Group all events by contract class id
|
|
1975
|
-
for (const [classIdString, classEvents] of Object.entries(
|
|
1976
|
-
groupBy([...privateFnEvents, ...utilityFnEvents], e => e.contractClassId.toString()),
|
|
1977
|
-
)) {
|
|
1978
|
-
const contractClassId = Fr.fromHexString(classIdString);
|
|
1979
|
-
const contractClass = await this.getContractClass(contractClassId);
|
|
1980
|
-
if (!contractClass) {
|
|
1981
|
-
this.#log.warn(`Skipping broadcasted functions as contract class ${contractClassId.toString()} was not found`);
|
|
1982
|
-
continue;
|
|
1983
|
-
}
|
|
1984
|
-
|
|
1985
|
-
// Split private and utility functions, and filter out invalid ones
|
|
1986
|
-
const allFns = classEvents.map(e => e.toFunctionWithMembershipProof());
|
|
1987
|
-
const privateFns = allFns.filter(
|
|
1988
|
-
(fn): fn is ExecutablePrivateFunctionWithMembershipProof => 'utilityFunctionsTreeRoot' in fn,
|
|
1989
|
-
);
|
|
1990
|
-
const utilityFns = allFns.filter(
|
|
1991
|
-
(fn): fn is UtilityFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn,
|
|
1992
|
-
);
|
|
1993
|
-
|
|
1994
|
-
const privateFunctionsWithValidity = await Promise.all(
|
|
1995
|
-
privateFns.map(async fn => ({ fn, valid: await isValidPrivateFunctionMembershipProof(fn, contractClass) })),
|
|
1996
|
-
);
|
|
1997
|
-
const validPrivateFns = privateFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
|
|
1998
|
-
const utilityFunctionsWithValidity = await Promise.all(
|
|
1999
|
-
utilityFns.map(async fn => ({
|
|
2000
|
-
fn,
|
|
2001
|
-
valid: await isValidUtilityFunctionMembershipProof(fn, contractClass),
|
|
2002
|
-
})),
|
|
2003
|
-
);
|
|
2004
|
-
const validUtilityFns = utilityFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
|
|
2005
|
-
const validFnCount = validPrivateFns.length + validUtilityFns.length;
|
|
2006
|
-
if (validFnCount !== allFns.length) {
|
|
2007
|
-
this.#log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`);
|
|
2008
|
-
}
|
|
2009
|
-
|
|
2010
|
-
// Store the functions in the contract class in a single operation
|
|
2011
|
-
if (validFnCount > 0) {
|
|
2012
|
-
this.#log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`);
|
|
2013
|
-
}
|
|
2014
|
-
return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns);
|
|
2015
|
-
}
|
|
2016
|
-
return true;
|
|
2017
|
-
}
|
|
2018
|
-
|
|
2019
|
-
private async addBlockDataToDB(block: L2BlockNew) {
|
|
2020
|
-
const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
|
|
2021
|
-
// ContractInstancePublished event logs are broadcast in privateLogs.
|
|
2022
|
-
const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
|
|
2023
|
-
const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
|
|
2024
|
-
|
|
2025
|
-
return (
|
|
2026
|
-
await Promise.all([
|
|
2027
|
-
this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Store),
|
|
2028
|
-
this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Store),
|
|
2029
|
-
this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Store),
|
|
2030
|
-
this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number),
|
|
2031
|
-
])
|
|
2032
|
-
).every(Boolean);
|
|
2033
|
-
}
|
|
2034
|
-
|
|
2035
|
-
public addBlocks(blocks: L2BlockNew[], pendingChainValidationStatus?: ValidateCheckpointResult): Promise<boolean> {
|
|
2036
|
-
// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
|
|
2037
|
-
// or if the previous block is not in the store.
|
|
2038
|
-
return this.store.transactionAsync(async () => {
|
|
2039
|
-
await this.store.addBlocks(blocks);
|
|
2040
|
-
|
|
2041
|
-
const opResults = await Promise.all([
|
|
2042
|
-
// Update the pending chain validation status if provided
|
|
2043
|
-
pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
|
|
2044
|
-
// Add any logs emitted during the retrieved blocks
|
|
2045
|
-
this.store.addLogs(blocks),
|
|
2046
|
-
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
2047
|
-
...blocks.map(block => {
|
|
2048
|
-
return this.addBlockDataToDB(block);
|
|
2049
|
-
}),
|
|
2050
|
-
]);
|
|
2051
|
-
|
|
2052
|
-
return opResults.every(Boolean);
|
|
2053
|
-
});
|
|
2054
|
-
}
|
|
2055
|
-
|
|
2056
|
-
public addCheckpoints(
|
|
2057
|
-
checkpoints: PublishedCheckpoint[],
|
|
2058
|
-
pendingChainValidationStatus?: ValidateCheckpointResult,
|
|
2059
|
-
): Promise<boolean> {
|
|
2060
|
-
// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
|
|
2061
|
-
// or if the previous block is not in the store.
|
|
2062
|
-
return this.store.transactionAsync(async () => {
|
|
2063
|
-
await this.store.addCheckpoints(checkpoints);
|
|
2064
|
-
const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks);
|
|
2065
|
-
|
|
2066
|
-
const opResults = await Promise.all([
|
|
2067
|
-
// Update the pending chain validation status if provided
|
|
2068
|
-
pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
|
|
2069
|
-
// Add any logs emitted during the retrieved blocks
|
|
2070
|
-
this.store.addLogs(allBlocks),
|
|
2071
|
-
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
2072
|
-
...allBlocks.map(block => {
|
|
2073
|
-
return this.addBlockDataToDB(block);
|
|
2074
|
-
}),
|
|
2075
|
-
]);
|
|
2076
|
-
|
|
2077
|
-
return opResults.every(Boolean);
|
|
2078
|
-
});
|
|
2079
|
-
}
|
|
2080
|
-
|
|
2081
|
-
public async unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
|
|
2082
|
-
if (checkpointsToUnwind <= 0) {
|
|
2083
|
-
throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
|
|
2084
|
-
}
|
|
2085
|
-
|
|
2086
|
-
const last = await this.getSynchedCheckpointNumber();
|
|
2087
|
-
if (from != last) {
|
|
2088
|
-
throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
|
|
2089
|
-
}
|
|
2090
|
-
|
|
2091
|
-
const blocks = [];
|
|
2092
|
-
const lastCheckpointNumber = from + checkpointsToUnwind - 1;
|
|
2093
|
-
for (let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++) {
|
|
2094
|
-
const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
|
|
2095
|
-
if (!blocksForCheckpoint) {
|
|
2096
|
-
continue;
|
|
2097
|
-
}
|
|
2098
|
-
blocks.push(...blocksForCheckpoint);
|
|
2099
|
-
}
|
|
2100
|
-
|
|
2101
|
-
const opResults = await Promise.all([
|
|
2102
|
-
// Prune rolls back to the last proven block, which is by definition valid
|
|
2103
|
-
this.store.setPendingChainValidationStatus({ valid: true }),
|
|
2104
|
-
// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
|
|
2105
|
-
...blocks.map(async block => {
|
|
2106
|
-
const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
|
|
2107
|
-
// ContractInstancePublished event logs are broadcast in privateLogs.
|
|
2108
|
-
const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
|
|
2109
|
-
const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
|
|
2110
|
-
|
|
2111
|
-
return (
|
|
2112
|
-
await Promise.all([
|
|
2113
|
-
this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete),
|
|
2114
|
-
this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Delete),
|
|
2115
|
-
this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete),
|
|
2116
|
-
])
|
|
2117
|
-
).every(Boolean);
|
|
2118
|
-
}),
|
|
2119
|
-
|
|
2120
|
-
this.store.deleteLogs(blocks),
|
|
2121
|
-
this.store.unwindCheckpoints(from, checkpointsToUnwind),
|
|
2122
|
-
]);
|
|
2123
|
-
|
|
2124
|
-
return opResults.every(Boolean);
|
|
2125
|
-
}
|
|
2126
|
-
|
|
2127
|
-
getCheckpointData(checkpointNumber: CheckpointNumber): Promise<CheckpointData | undefined> {
|
|
2128
|
-
return this.store.getCheckpointData(checkpointNumber);
|
|
2129
|
-
}
|
|
2130
|
-
|
|
2131
|
-
getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]> {
|
|
2132
|
-
return this.store.getRangeOfCheckpoints(from, limit);
|
|
2133
|
-
}
|
|
2134
|
-
|
|
2135
|
-
getCheckpointedL2BlockNumber(): Promise<BlockNumber> {
|
|
2136
|
-
return this.store.getCheckpointedL2BlockNumber();
|
|
2137
|
-
}
|
|
2138
|
-
getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
|
|
2139
|
-
return this.store.getSynchedCheckpointNumber();
|
|
2140
|
-
}
|
|
2141
|
-
setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
|
|
2142
|
-
return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
|
|
2143
|
-
}
|
|
2144
|
-
getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
|
|
2145
|
-
return this.store.getCheckpointedBlock(number);
|
|
2146
|
-
}
|
|
2147
|
-
getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
2148
|
-
return this.store.getCheckpointedBlockByHash(blockHash);
|
|
2149
|
-
}
|
|
2150
|
-
getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
|
|
2151
|
-
return this.store.getCheckpointedBlockByArchive(archive);
|
|
2152
|
-
}
|
|
2153
|
-
getBlockHeaders(from: BlockNumber, limit: number): Promise<BlockHeader[]> {
|
|
2154
|
-
return this.store.getBlockHeaders(from, limit);
|
|
2155
|
-
}
|
|
2156
|
-
getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
|
|
2157
|
-
return this.store.getBlockHeaderByHash(blockHash);
|
|
2158
|
-
}
|
|
2159
|
-
getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
|
|
2160
|
-
return this.store.getBlockHeaderByArchive(archive);
|
|
2161
|
-
}
|
|
2162
|
-
getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined> {
|
|
2163
|
-
return this.store.getBlockByHash(blockHash);
|
|
2164
|
-
}
|
|
2165
|
-
getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined> {
|
|
2166
|
-
return this.store.getBlockByArchive(archive);
|
|
2167
|
-
}
|
|
2168
|
-
getLatestBlockNumber(): Promise<BlockNumber> {
|
|
2169
|
-
return this.store.getLatestBlockNumber();
|
|
2170
|
-
}
|
|
2171
|
-
getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined> {
|
|
2172
|
-
return this.store.getBlocksForCheckpoint(checkpointNumber);
|
|
2173
|
-
}
|
|
2174
|
-
getTxEffect(txHash: TxHash): Promise<IndexedTxEffect | undefined> {
|
|
2175
|
-
return this.store.getTxEffect(txHash);
|
|
2176
|
-
}
|
|
2177
|
-
getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
|
|
2178
|
-
return this.store.getSettledTxReceipt(txHash);
|
|
2179
|
-
}
|
|
2180
|
-
addL1ToL2Messages(messages: InboxMessage[]): Promise<void> {
|
|
2181
|
-
return this.store.addL1ToL2Messages(messages);
|
|
2182
|
-
}
|
|
2183
|
-
getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
|
|
2184
|
-
return this.store.getL1ToL2Messages(checkpointNumber);
|
|
2185
|
-
}
|
|
2186
|
-
getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
|
|
2187
|
-
return this.store.getL1ToL2MessageIndex(l1ToL2Message);
|
|
2188
|
-
}
|
|
2189
|
-
getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
|
|
2190
|
-
return this.store.getPrivateLogsByTags(tags);
|
|
2191
|
-
}
|
|
2192
|
-
getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
|
|
2193
|
-
return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
2194
|
-
}
|
|
2195
|
-
getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
|
|
2196
|
-
return this.store.getPublicLogs(filter);
|
|
2197
|
-
}
|
|
2198
|
-
getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
|
|
2199
|
-
return this.store.getContractClassLogs(filter);
|
|
2200
|
-
}
|
|
2201
|
-
getSynchedL2BlockNumber(): Promise<BlockNumber> {
|
|
2202
|
-
return this.store.getLatestBlockNumber();
|
|
2203
|
-
}
|
|
2204
|
-
getProvenCheckpointNumber(): Promise<CheckpointNumber> {
|
|
2205
|
-
return this.store.getProvenCheckpointNumber();
|
|
2206
|
-
}
|
|
2207
|
-
getProvenBlockNumber(): Promise<BlockNumber> {
|
|
2208
|
-
return this.store.getProvenBlockNumber();
|
|
2209
|
-
}
|
|
2210
|
-
setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
|
|
2211
|
-
return this.store.setProvenCheckpointNumber(checkpointNumber);
|
|
2212
|
-
}
|
|
2213
|
-
setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
|
|
2214
|
-
return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
|
|
2215
|
-
}
|
|
2216
|
-
setMessageSynchedL1Block(l1Block: L1BlockId): Promise<void> {
|
|
2217
|
-
return this.store.setMessageSynchedL1Block(l1Block);
|
|
2218
|
-
}
|
|
2219
|
-
getSynchPoint(): Promise<ArchiverL1SynchPoint> {
|
|
2220
|
-
return this.store.getSynchPoint();
|
|
2221
|
-
}
|
|
2222
|
-
getContractClass(id: Fr): Promise<ContractClassPublic | undefined> {
|
|
2223
|
-
return this.store.getContractClass(id);
|
|
2224
|
-
}
|
|
2225
|
-
getBytecodeCommitment(contractClassId: Fr): Promise<Fr | undefined> {
|
|
2226
|
-
return this.store.getBytecodeCommitment(contractClassId);
|
|
2227
|
-
}
|
|
2228
|
-
getContractInstance(address: AztecAddress, timestamp: UInt64): Promise<ContractInstanceWithAddress | undefined> {
|
|
2229
|
-
return this.store.getContractInstance(address, timestamp);
|
|
2230
|
-
}
|
|
2231
|
-
getContractClassIds(): Promise<Fr[]> {
|
|
2232
|
-
return this.store.getContractClassIds();
|
|
2233
|
-
}
|
|
2234
|
-
registerContractFunctionSignatures(signatures: string[]): Promise<void> {
|
|
2235
|
-
return this.store.registerContractFunctionSignatures(signatures);
|
|
2236
|
-
}
|
|
2237
|
-
getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
|
|
2238
|
-
return this.store.getDebugFunctionName(address, selector);
|
|
2239
|
-
}
|
|
2240
|
-
getTotalL1ToL2MessageCount(): Promise<bigint> {
|
|
2241
|
-
return this.store.getTotalL1ToL2MessageCount();
|
|
2242
|
-
}
|
|
2243
|
-
estimateSize(): Promise<{ mappingSize: number; physicalFileSize: number; actualSize: number; numItems: number }> {
|
|
2244
|
-
return this.store.estimateSize();
|
|
2245
|
-
}
|
|
2246
|
-
rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void> {
|
|
2247
|
-
return this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
|
|
2248
|
-
}
|
|
2249
|
-
iterateL1ToL2Messages(range: CustomRange<bigint> = {}): AsyncIterableIterator<InboxMessage> {
|
|
2250
|
-
return this.store.iterateL1ToL2Messages(range);
|
|
2251
|
-
}
|
|
2252
|
-
removeL1ToL2Messages(startIndex: bigint): Promise<void> {
|
|
2253
|
-
return this.store.removeL1ToL2Messages(startIndex);
|
|
2254
|
-
}
|
|
2255
|
-
getLastL1ToL2Message(): Promise<InboxMessage | undefined> {
|
|
2256
|
-
return this.store.getLastL1ToL2Message();
|
|
2257
|
-
}
|
|
2258
|
-
getPendingChainValidationStatus(): Promise<ValidateCheckpointResult | undefined> {
|
|
2259
|
-
return this.store.getPendingChainValidationStatus();
|
|
2260
|
-
}
|
|
2261
|
-
setPendingChainValidationStatus(status: ValidateCheckpointResult | undefined): Promise<void> {
|
|
2262
|
-
this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
|
|
2263
|
-
return this.store.setPendingChainValidationStatus(status);
|
|
2264
|
-
}
|
|
2265
|
-
}
|