@aztec/archiver 4.0.0-nightly.20260115 → 4.0.0-nightly.20260117
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -2
- package/dest/archiver.d.ts +3 -2
- package/dest/archiver.d.ts.map +1 -1
- package/dest/archiver.js +8 -6
- package/dest/modules/data_source_base.d.ts +2 -1
- package/dest/modules/data_source_base.d.ts.map +1 -1
- package/dest/modules/data_source_base.js +3 -0
- package/dest/modules/data_store_updater.d.ts +29 -6
- package/dest/modules/data_store_updater.d.ts.map +1 -1
- package/dest/modules/data_store_updater.js +117 -29
- package/dest/modules/l1_synchronizer.d.ts +12 -4
- package/dest/modules/l1_synchronizer.d.ts.map +1 -1
- package/dest/modules/l1_synchronizer.js +101 -52
- package/dest/store/block_store.d.ts +16 -2
- package/dest/store/block_store.d.ts.map +1 -1
- package/dest/store/block_store.js +62 -8
- package/dest/store/kv_archiver_store.d.ts +14 -2
- package/dest/store/kv_archiver_store.d.ts.map +1 -1
- package/dest/store/kv_archiver_store.js +14 -0
- package/dest/store/log_store.d.ts +1 -1
- package/dest/store/log_store.d.ts.map +1 -1
- package/dest/store/log_store.js +69 -48
- package/dest/test/fake_l1_state.d.ts +18 -1
- package/dest/test/fake_l1_state.d.ts.map +1 -1
- package/dest/test/fake_l1_state.js +36 -17
- package/dest/test/mock_l2_block_source.d.ts +2 -1
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +4 -0
- package/package.json +13 -13
- package/src/archiver.ts +9 -6
- package/src/modules/data_source_base.ts +4 -0
- package/src/modules/data_store_updater.ts +143 -42
- package/src/modules/l1_synchronizer.ts +123 -62
- package/src/store/block_store.ts +79 -10
- package/src/store/kv_archiver_store.ts +19 -1
- package/src/store/log_store.ts +112 -76
- package/src/test/fake_l1_state.ts +62 -24
- package/src/test/mock_l2_block_source.ts +5 -0

package/src/modules/data_store_updater.ts

@@ -1,4 +1,4 @@
-import
+import { BlockNumber, type CheckpointNumber } from '@aztec/foundation/branded-types';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import {
@@ -32,6 +32,14 @@ enum Operation {
   Delete,
 }
 
+/** Result of adding checkpoints with information about any pruned blocks. */
+type ReconcileCheckpointsResult = {
+  /** Blocks that were pruned due to conflict with L1 checkpoints. */
+  prunedBlocks: L2BlockNew[] | undefined;
+  /** Last block number that was already inserted locally, or undefined if none. */
+  lastAlreadyInsertedBlockNumber: BlockNumber | undefined;
+};
+
 /** Archiver helper module to handle updates to the data store. */
 export class ArchiverDataStoreUpdater {
   private readonly log = createLogger('archiver:store_updater');
@@ -47,10 +55,7 @@ export class ArchiverDataStoreUpdater {
    * @param pendingChainValidationStatus - Optional validation status to set.
    * @returns True if the operation is successful.
    */
-  public
-    blocks: L2BlockNew[],
-    pendingChainValidationStatus?: ValidateCheckpointResult,
-  ): Promise<boolean> {
+  public addBlocks(blocks: L2BlockNew[], pendingChainValidationStatus?: ValidateCheckpointResult): Promise<boolean> {
     return this.store.transactionAsync(async () => {
       await this.store.addBlocks(blocks);
 
@@ -68,32 +73,136 @@ export class ArchiverDataStoreUpdater {
   }
 
   /**
+   * Reconciles local blocks with incoming checkpoints from L1.
    * Adds checkpoints to the store with contract class/instance extraction from logs.
+   * Prunes any local blocks that conflict with checkpoint data (by comparing archive roots).
    * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events,
    * and individually broadcasted functions from the checkpoint block logs.
    *
    * @param checkpoints - The published checkpoints to add.
    * @param pendingChainValidationStatus - Optional validation status to set.
-   * @returns
+   * @returns Result with information about any pruned blocks.
    */
-  public
+  public setNewCheckpointData(
     checkpoints: PublishedCheckpoint[],
     pendingChainValidationStatus?: ValidateCheckpointResult,
-  ): Promise<
+  ): Promise<ReconcileCheckpointsResult> {
     return this.store.transactionAsync(async () => {
+      // Before adding checkpoints, check for conflicts with local blocks if any
+      const { prunedBlocks, lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints);
+
       await this.store.addCheckpoints(checkpoints);
-      const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks);
 
-
+      // Filter out blocks that were already inserted via addBlocks() to avoid duplicating logs/contract data
+      const newBlocks = checkpoints
+        .flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks)
+        .filter(b => lastAlreadyInsertedBlockNumber === undefined || b.number > lastAlreadyInsertedBlockNumber);
+
+      await Promise.all([
         // Update the pending chain validation status if provided
         pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
         // Add any logs emitted during the retrieved blocks
-        this.store.addLogs(
+        this.store.addLogs(newBlocks),
         // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
-        ...
+        ...newBlocks.map(block => this.addBlockDataToDB(block)),
       ]);
 
-      return
+      return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+    });
+  }
+
+  /**
+   * Checks for local proposed blocks that do not match the ones to be checkpointed and prunes them.
+   * This method handles multiple checkpoints but returns after pruning the first conflict found.
+   * This is correct because pruning from the first conflict point removes all subsequent blocks,
+   * and when checkpoints are added afterward, they include all the correct blocks.
+   */
+  private async pruneMismatchingLocalBlocks(checkpoints: PublishedCheckpoint[]): Promise<ReconcileCheckpointsResult> {
+    const [lastCheckpointedBlockNumber, lastBlockNumber] = await Promise.all([
+      this.store.getCheckpointedL2BlockNumber(),
+      this.store.getLatestBlockNumber(),
+    ]);
+
+    // Exit early if there are no local uncheckpointed blocks
+    if (lastBlockNumber === lastCheckpointedBlockNumber) {
+      return { prunedBlocks: undefined, lastAlreadyInsertedBlockNumber: undefined };
+    }
+
+    // Get all uncheckpointed local blocks
+    const uncheckpointedLocalBlocks = await this.store.getBlocks(
+      BlockNumber.add(lastCheckpointedBlockNumber, 1),
+      lastBlockNumber - lastCheckpointedBlockNumber,
+    );
+
+    let lastAlreadyInsertedBlockNumber: BlockNumber | undefined;
+
+    for (const publishedCheckpoint of checkpoints) {
+      const checkpointBlocks = publishedCheckpoint.checkpoint.blocks;
+      const slot = publishedCheckpoint.checkpoint.slot;
+      const localBlocksInSlot = uncheckpointedLocalBlocks.filter(b => b.slot === slot);
+
+      if (checkpointBlocks.length === 0) {
+        this.log.warn(`Checkpoint ${publishedCheckpoint.checkpoint.number} for slot ${slot} has no blocks`);
+        continue;
+      }
+
+      // Find the first checkpoint block that conflicts with an existing local block and prune local afterwards
+      for (const checkpointBlock of checkpointBlocks) {
+        const blockNumber = checkpointBlock.number;
+        const existingBlock = localBlocksInSlot.find(b => b.number === blockNumber);
+        const blockInfos = {
+          existingBlock: existingBlock?.toBlockInfo(),
+          checkpointBlock: checkpointBlock.toBlockInfo(),
+        };
+
+        if (!existingBlock) {
+          this.log.verbose(`No local block found for checkpointed block number ${blockNumber}`, blockInfos);
+        } else if (existingBlock.archive.root.equals(checkpointBlock.archive.root)) {
+          this.log.verbose(`Block number ${blockNumber} already inserted and matches checkpoint`, blockInfos);
+          lastAlreadyInsertedBlockNumber = blockNumber;
+        } else {
+          this.log.warn(`Conflict detected at block ${blockNumber} between checkpointed and local block`, blockInfos);
+          const prunedBlocks = await this.removeBlocksAfter(BlockNumber(blockNumber - 1));
+          return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+        }
+      }
+
+      // If local has more blocks than the checkpoint (e.g., local has [2,3,4] but checkpoint has [2,3]),
+      // we need to prune the extra local blocks so they match what was checkpointed
+      const lastCheckpointBlockNumber = checkpointBlocks.at(-1)!.number;
+      const lastLocalBlockNumber = localBlocksInSlot.at(-1)?.number;
+
+      if (lastLocalBlockNumber !== undefined && lastLocalBlockNumber > lastCheckpointBlockNumber) {
+        this.log.warn(
+          `Local chain for slot ${slot} ends at block ${lastLocalBlockNumber} but checkpoint ends at ${lastCheckpointBlockNumber}. Pruning blocks after block ${lastCheckpointBlockNumber}.`,
+        );
+        const prunedBlocks = await this.removeBlocksAfter(lastCheckpointBlockNumber);
+        return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+      }
+    }
+
+    return { prunedBlocks: undefined, lastAlreadyInsertedBlockNumber };
+  }
+
+  /**
+   * Removes all blocks strictly after the specified block number and cleans up associated contract data.
+   * This handles removal of provisionally added blocks along with their contract classes/instances.
+   *
+   * @param blockNumber - Remove all blocks with number greater than this.
+   * @returns The removed blocks.
+   */
+  public removeBlocksAfter(blockNumber: BlockNumber): Promise<L2BlockNew[]> {
+    return this.store.transactionAsync(async () => {
+      // First get the blocks to be removed so we can clean up contract data
+      const removedBlocks = await this.store.removeBlocksAfter(blockNumber);
+
+      // Clean up contract data and logs for the removed blocks
+      await Promise.all([
+        this.store.deleteLogs(removedBlocks),
+        ...removedBlocks.map(block => this.removeBlockDataFromDB(block)),
+      ]);
+
+      return removedBlocks;
     });
   }
 
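The reconciliation that `setNewCheckpointData` now performs via `pruneMismatchingLocalBlocks` boils down to comparing archive roots per block number: keep local blocks that match the checkpoint, prune everything from the first mismatch, and trim any local tail that extends past the checkpoint. The sketch below restates that rule on plain values, outside the store and transaction machinery; the `SimpleBlock` shape and the `reconcile` helper are invented for illustration and are not part of the package API.

```ts
// Simplified stand-ins for the archiver's block data (illustrative only).
type SimpleBlock = { number: number; archiveRoot: string };

type ReconcileDecision = { pruneAfter?: number; lastMatching?: number };

/** Decide where the local chain must be cut so it agrees with the checkpointed blocks of one slot. */
function reconcile(localBlocks: SimpleBlock[], checkpointBlocks: SimpleBlock[]): ReconcileDecision {
  let lastMatching: number | undefined;

  for (const cp of checkpointBlocks) {
    const local = localBlocks.find(b => b.number === cp.number);
    if (!local) {
      continue; // nothing local at this height; the checkpoint block is simply new data
    }
    if (local.archiveRoot === cp.archiveRoot) {
      lastMatching = cp.number; // already inserted locally and consistent with L1
    } else {
      // First conflict: this block and everything after it must be pruned locally.
      return { pruneAfter: cp.number - 1, lastMatching };
    }
  }

  // Local chain extends past the checkpoint (e.g. local [2,3,4] vs checkpointed [2,3]): trim the surplus.
  const lastCheckpointed = checkpointBlocks.at(-1)?.number;
  const lastLocal = localBlocks.at(-1)?.number;
  if (lastCheckpointed !== undefined && lastLocal !== undefined && lastLocal > lastCheckpointed) {
    return { pruneAfter: lastCheckpointed, lastMatching };
  }

  return { lastMatching };
}

// Example: local block 3 diverges from the checkpointed block 3, so prune everything after block 2.
const decision = reconcile(
  [
    { number: 2, archiveRoot: '0xaa' },
    { number: 3, archiveRoot: '0xbb' },
    { number: 4, archiveRoot: '0xcc' },
  ],
  [
    { number: 2, archiveRoot: '0xaa' },
    { number: 3, archiveRoot: '0xdd' },
  ],
);
console.log(decision); // { pruneAfter: 2, lastMatching: 2 }
```

Here `lastMatching` plays the role of `lastAlreadyInsertedBlockNumber`: blocks up to that height were already stored via `addBlocks`, so `setNewCheckpointData` skips them when re-adding logs and contract data.
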
@@ -106,10 +215,7 @@ export class ArchiverDataStoreUpdater {
    * @param checkpointsToUnwind - The number of checkpoints to unwind.
    * @returns True if the operation is successful.
    */
-  public async
-    from: CheckpointNumber,
-    checkpointsToUnwind: number,
-  ): Promise<boolean> {
+  public async unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
     if (checkpointsToUnwind <= 0) {
       throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
     }
@@ -132,22 +238,8 @@ export class ArchiverDataStoreUpdater {
     const opResults = await Promise.all([
       // Prune rolls back to the last proven block, which is by definition valid
       this.store.setPendingChainValidationStatus({ valid: true }),
-      //
-      ...blocks.map(
-        const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
-        // ContractInstancePublished event logs are broadcast in privateLogs.
-        const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
-        const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
-
-        return (
-          await Promise.all([
-            this.updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete),
-            this.updateDeployedContractInstances(privateLogs, block.number, Operation.Delete),
-            this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete),
-          ])
-        ).every(Boolean);
-      }),
-
+      // Remove contract data for all blocks being unwound
+      ...blocks.map(block => this.removeBlockDataFromDB(block)),
       this.store.deleteLogs(blocks),
       this.store.unwindCheckpoints(from, checkpointsToUnwind),
     ]);
@@ -155,21 +247,30 @@ export class ArchiverDataStoreUpdater {
     return opResults.every(Boolean);
   }
 
-  /**
-
-
-
+  /** Extracts and stores contract data from a single block. */
+  private addBlockDataToDB(block: L2BlockNew): Promise<boolean> {
+    return this.editContractBlockData(block, Operation.Store);
+  }
+
+  /** Removes contract data associated with a block. */
+  private removeBlockDataFromDB(block: L2BlockNew): Promise<boolean> {
+    return this.editContractBlockData(block, Operation.Delete);
+  }
+
+  /** Adds or remove contract data associated with a block. */
+  private async editContractBlockData(block: L2BlockNew, operation: Operation): Promise<boolean> {
     const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
-    // ContractInstancePublished event logs are broadcast in privateLogs.
     const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
     const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
 
     return (
       await Promise.all([
-        this.updatePublishedContractClasses(contractClassLogs, block.number,
-        this.updateDeployedContractInstances(privateLogs, block.number,
-        this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp,
-
+        this.updatePublishedContractClasses(contractClassLogs, block.number, operation),
+        this.updateDeployedContractInstances(privateLogs, block.number, operation),
+        this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, operation),
+        operation === Operation.Store
+          ? this.storeBroadcastedIndividualFunctions(contractClassLogs, block.number)
+          : Promise.resolve(true),
       ])
     ).every(Boolean);
   }

package/src/modules/l1_synchronizer.ts

@@ -16,7 +16,7 @@ import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
 import { isDefined } from '@aztec/foundation/types';
 import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResult } from '@aztec/stdlib/block';
 import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
-import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
+import { type L1RollupConstants, getEpochAtSlot, getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers';
 import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
 import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client';
@@ -49,6 +49,7 @@ type RollupStatus = {
  */
 export class ArchiverL1Synchronizer implements Traceable {
   private l1BlockNumber: bigint | undefined;
+  private l1BlockHash: Buffer32 | undefined;
   private l1Timestamp: bigint | undefined;
 
   private readonly updater: ArchiverDataStoreUpdater;
@@ -64,7 +65,7 @@ export class ArchiverL1Synchronizer implements Traceable {
       'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
     > & { slashingProposerAddress: EthAddress },
     private readonly store: KVArchiverDataStore,
-    private
+    private config: {
       batchSize: number;
       skipValidateCheckpointAttestations?: boolean;
       maxAllowedEthClientDriftSeconds: number;
@@ -73,7 +74,7 @@ export class ArchiverL1Synchronizer implements Traceable {
     private readonly epochCache: EpochCache,
     private readonly dateProvider: DateProvider,
     private readonly instrumentation: ArchiverInstrumentation,
-    private readonly
+    private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
     private readonly events: ArchiverEmitter,
     tracer: Tracer,
     private readonly log: Logger = createLogger('archiver:l1-sync'),
@@ -82,6 +83,15 @@ export class ArchiverL1Synchronizer implements Traceable {
     this.tracer = tracer;
   }
 
+  /** Sets new config */
+  public setConfig(newConfig: {
+    batchSize: number;
+    skipValidateCheckpointAttestations?: boolean;
+    maxAllowedEthClientDriftSeconds: number;
+  }) {
+    this.config = newConfig;
+  }
+
   /** Returns the last L1 block number that was synced. */
   public getL1BlockNumber(): bigint | undefined {
     return this.l1BlockNumber;
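The new `setConfig` method makes the sync parameters mutable at runtime instead of being fixed at construction. A minimal usage sketch, assuming `synchronizer` is an already-constructed instance of this class and using made-up values; note that `batchSize` is expressed in L2 slots and converted to an L1 block span in `nextRange` further down.

```ts
// Hypothetical handle to an existing ArchiverL1Synchronizer; only the new method is modeled here.
declare const synchronizer: {
  setConfig(cfg: {
    batchSize: number;
    skipValidateCheckpointAttestations?: boolean;
    maxAllowedEthClientDriftSeconds: number;
  }): void;
};

// Example values only: tighten the batch size and allow up to a minute of L1 client drift.
synchronizer.setConfig({
  batchSize: 50, // interpreted as L2 slots; see the nextRange hunk below for the conversion
  skipValidateCheckpointAttestations: false,
  maxAllowedEthClientDriftSeconds: 60,
});
```
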
@@ -109,27 +119,36 @@ export class ArchiverL1Synchronizer implements Traceable {
 
   @trackSpan('Archiver.syncFromL1')
   public async syncFromL1(initialSyncComplete: boolean): Promise<void> {
-    /**
-     * We keep track of three "pointers" to L1 blocks:
-     * 1. the last L1 block that published an L2 block
-     * 2. the last L1 block that added L1 to L2 messages
-     * 3. the last L1 block that cancelled L1 to L2 messages
-     *
-     * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
-     * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
-     * The archiver will stay back, until there's data on L1 that will move the pointers forward.
-     */
-    const { l1StartBlock, l1StartBlockHash } = this.l1constants;
-    const {
-      blocksSynchedTo = l1StartBlock,
-      messagesSynchedTo = { l1BlockNumber: l1StartBlock, l1BlockHash: l1StartBlockHash },
-    } = await this.store.getSynchPoint();
-
     const currentL1Block = await this.publicClient.getBlock({ includeTransactions: false });
     const currentL1BlockNumber = currentL1Block.number;
     const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
+    const currentL1Timestamp = currentL1Block.timestamp;
 
-    this.
+    if (this.l1BlockHash && currentL1BlockHash.equals(this.l1BlockHash)) {
+      this.log.trace(`No new L1 blocks since last sync at L1 block ${this.l1BlockNumber}`);
+      return;
+    }
+
+    // Warn if the latest L1 block timestamp is too old
+    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+    const now = this.dateProvider.nowInSeconds();
+    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
+      this.log.warn(
+        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
+        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
+      );
+    }
+
+    // Load sync point for blocks and messages defaulting to start block
+    const {
+      blocksSynchedTo = this.l1Constants.l1StartBlock,
+      messagesSynchedTo = {
+        l1BlockNumber: this.l1Constants.l1StartBlock,
+        l1BlockHash: this.l1Constants.l1StartBlockHash,
+      },
+    } = await this.store.getSynchPoint();
+
+    this.log.debug(`Starting new archiver sync iteration`, {
       blocksSynchedTo,
       messagesSynchedTo,
       currentL1BlockNumber,
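Two cheap pre-checks now run before anything touches the store: the iteration is skipped outright when the L1 head hash has not moved since the previous sync, and a warning is logged when the head timestamp lags wall-clock time by more than `maxAllowedEthClientDriftSeconds`. A self-contained sketch of the staleness test; the helper and its signature are illustrative, not package code.

```ts
/** True when the L1 head looks stale relative to local wall-clock time, mirroring the guard in syncFromL1. */
function isL1HeadStale(headTimestampSeconds: bigint, nowSeconds: number, maxDriftSeconds: number): boolean {
  // A non-positive limit disables the check, matching the `maxAllowedDelay > 0` condition above.
  if (maxDriftSeconds <= 0) {
    return false;
  }
  return Number(headTimestampSeconds) <= nowSeconds - maxDriftSeconds;
}

// Example: a head that is 90 seconds old with a 60-second allowance triggers the warning.
console.log(isL1HeadStale(1_700_000_000n, 1_700_000_090, 60)); // true
```
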
@@ -156,29 +175,17 @@ export class ArchiverL1Synchronizer implements Traceable {
      */
 
     // ********** Events that are processed per L1 block **********
-    await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber
-
-    // Get L1 timestamp for the current block
-    const currentL1Timestamp =
-      !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber
-        ? (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp
-        : this.l1Timestamp;
-
-    // Warn if the latest L1 block timestamp is too old
-    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
-    const now = this.dateProvider.nowInSeconds();
-    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
-      this.log.warn(
-        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
-        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
-      );
-    }
+    await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber);
 
     // ********** Events that are processed per checkpoint **********
     if (currentL1BlockNumber > blocksSynchedTo) {
       // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
       // pending chain validation status, proven checkpoint number, and synched L1 block number.
       const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete);
+
+      // Then we try pruning uncheckpointed blocks if a new slot was mined without checkpoints
+      await this.pruneUncheckpointedBlocks(currentL1Timestamp);
+
       // Then we prune the current epoch if it'd reorg on next submission.
       // Note that we don't do this before retrieving checkpoints because we may need to retrieve
       // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
@@ -213,18 +220,59 @@ export class ArchiverL1Synchronizer implements Traceable {
     // but the corresponding blocks have not been processed (see #12631).
     this.l1Timestamp = currentL1Timestamp;
     this.l1BlockNumber = currentL1BlockNumber;
+    this.l1BlockHash = currentL1BlockHash;
 
     const l1BlockNumberAtEnd = await this.publicClient.getBlockNumber();
-    this.log.
+    this.log.debug(`Archiver sync iteration complete`, {
       l1BlockNumberAtStart: currentL1BlockNumber,
       l1TimestampAtStart: currentL1Timestamp,
       l1BlockNumberAtEnd,
     });
   }
 
+  /** Prune all proposed local blocks that should have been checkpointed by now. */
+  private async pruneUncheckpointedBlocks(currentL1Timestamp: bigint) {
+    const [lastCheckpointedBlockNumber, lastProposedBlockNumber] = await Promise.all([
+      this.store.getCheckpointedL2BlockNumber(),
+      this.store.getLatestBlockNumber(),
+    ]);
+
+    // If there are no uncheckpointed blocks, we got nothing to do
+    if (lastProposedBlockNumber === lastCheckpointedBlockNumber) {
+      this.log.trace(`No uncheckpointed blocks to prune.`);
+      return;
+    }
+
+    // What's the slot of the first uncheckpointed block?
+    const firstUncheckpointedBlockNumber = BlockNumber(lastCheckpointedBlockNumber + 1);
+    const [firstUncheckpointedBlockHeader] = await this.store.getBlockHeaders(firstUncheckpointedBlockNumber, 1);
+    const firstUncheckpointedBlockSlot = firstUncheckpointedBlockHeader?.getSlot();
+
+    // What's the slot at the next L1 block? All blocks for slots strictly before this one should've been checkpointed by now.
+    const nextL1BlockTimestamp = currentL1Timestamp + BigInt(this.l1Constants.ethereumSlotDuration);
+    const slotAtNextL1Block = getSlotAtTimestamp(nextL1BlockTimestamp, this.l1Constants);
+
+    // Prune provisional blocks from slots that have ended without being checkpointed
+    if (firstUncheckpointedBlockSlot !== undefined && firstUncheckpointedBlockSlot < slotAtNextL1Block) {
+      this.log.warn(
+        `Pruning blocks after block ${lastCheckpointedBlockNumber} due to slot ${firstUncheckpointedBlockSlot} not being checkpointed`,
+        { firstUncheckpointedBlockHeader: firstUncheckpointedBlockHeader.toInspect(), slotAtNextL1Block },
+      );
+      const prunedBlocks = await this.updater.removeBlocksAfter(lastCheckpointedBlockNumber);
+
+      if (prunedBlocks.length > 0) {
+        this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
+          type: L2BlockSourceEvents.L2PruneUncheckpointed,
+          slotNumber: firstUncheckpointedBlockSlot,
+          blocks: prunedBlocks,
+        });
+      }
+    }
+  }
+
   /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
   private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint): Promise<boolean> {
-    const time = (currentL1Timestamp ?? 0n) + BigInt(this.
+    const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1Constants.ethereumSlotDuration);
     const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber });
     if (result) {
       this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
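The prune decision in `pruneUncheckpointedBlocks` is a single comparison: the L2 slot that will be current at the next L1 block versus the slot of the first locally proposed block that never made it into a checkpoint. A hedged sketch of that arithmetic, assuming slots are derived from a genesis timestamp by integer division; the constants below are made-up example values, and the package itself uses `getSlotAtTimestamp` from `@aztec/stdlib/epoch-helpers`.

```ts
// Illustrative timing constants, not real network values.
const constants = { l1GenesisTime: 1_700_000_000n, slotDuration: 36n, ethereumSlotDuration: 12n };

/** Slot number active at a given timestamp, as a plain genesis-relative division. */
function slotAt(timestampSeconds: bigint): bigint {
  return (timestampSeconds - constants.l1GenesisTime) / constants.slotDuration;
}

/** Should provisional (uncheckpointed) local blocks be pruned on this sync iteration? */
function shouldPruneUncheckpointed(firstUncheckpointedBlockSlot: bigint, currentL1Timestamp: bigint): boolean {
  // Look one L1 block ahead: any slot strictly before that one has had its chance to be checkpointed.
  const slotAtNextL1Block = slotAt(currentL1Timestamp + constants.ethereumSlotDuration);
  return firstUncheckpointedBlockSlot < slotAtNextL1Block;
}

// Example: a block proposed in slot 10 is still uncheckpointed while L1 time has moved into slot 11.
const now = constants.l1GenesisTime + 11n * constants.slotDuration;
console.log(shouldPruneUncheckpointed(10n, now)); // true => prune and emit L2PruneUncheckpointed
```
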
@@ -257,7 +305,7 @@ export class ArchiverL1Synchronizer implements Traceable {
     }
 
     const pruneFromSlotNumber = header.slotNumber;
-    const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.
+    const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1Constants);
 
     const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
 
@@ -274,8 +322,8 @@ export class ArchiverL1Synchronizer implements Traceable {
     const newBlocks = blockPromises.filter(isDefined).flat();
 
     // Emit an event for listening services to react to the chain prune
-    this.events.emit(L2BlockSourceEvents.
-      type: L2BlockSourceEvents.
+    this.events.emit(L2BlockSourceEvents.L2PruneUnproven, {
+      type: L2BlockSourceEvents.L2PruneUnproven,
       epochNumber: pruneFromEpochNumber,
       blocks: newBlocks,
     });
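Downstream services can distinguish the two prune causes by listening for the renamed `L2PruneUnproven` event (unwinding unproven checkpoints ahead of a predicted reorg) and the new `L2PruneUncheckpointed` event (provisional blocks dropped). The payload fields below are taken from the emit calls in this diff; the listener wiring itself is a sketch and assumes the archiver's emitter follows a Node-style `on(event, handler)` contract.

```ts
import { L2BlockSourceEvents } from '@aztec/stdlib/block';

// Illustrative subscriber; payload shapes mirror the emit() calls above and are typed loosely here.
function watchPrunes(events: { on(event: string, handler: (payload: any) => void): void }) {
  events.on(L2BlockSourceEvents.L2PruneUnproven, ({ epochNumber, blocks }) => {
    console.warn(`Unproven chain pruned at epoch ${epochNumber}: dropped ${blocks.length} blocks`);
  });
  events.on(L2BlockSourceEvents.L2PruneUncheckpointed, ({ slotNumber, blocks }) => {
    console.warn(`Uncheckpointed blocks pruned for slot ${slotNumber}: dropped ${blocks.length} blocks`);
  });
}
```
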
@@ -283,7 +331,7 @@ export class ArchiverL1Synchronizer implements Traceable {
       this.log.debug(
         `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
       );
-      await this.updater.
+      await this.updater.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
       this.log.warn(
         `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
           `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
@@ -299,7 +347,7 @@ export class ArchiverL1Synchronizer implements Traceable {
   }
 
   private nextRange(end: bigint, limit: bigint): [bigint, bigint] {
-    const batchSize = (this.config.batchSize * this.
+    const batchSize = (this.config.batchSize * this.l1Constants.slotDuration) / this.l1Constants.ethereumSlotDuration;
     const nextStart = end + 1n;
     const nextEnd = nextStart + BigInt(batchSize);
     if (nextEnd > limit) {
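`nextRange` converts the configured batch size, expressed in L2 slots, into a window of L1 blocks using the ratio of the L2 slot duration to the L1 (Ethereum) slot duration. A worked sketch of that arithmetic with made-up durations; the clamp to `limit` is assumed from the visible `nextEnd > limit` guard.

```ts
// Example conversion from an L2-slot batch size to a span of L1 block numbers.
const config = { batchSize: 100 }; // scan up to 100 L2 slots per iteration
const l1Constants = { slotDuration: 36, ethereumSlotDuration: 12 }; // seconds; example values only

const l1BlocksPerBatch = (config.batchSize * l1Constants.slotDuration) / l1Constants.ethereumSlotDuration;
console.log(l1BlocksPerBatch); // 300: one hundred 36s L2 slots span three hundred 12s L1 blocks

function nextRange(end: bigint, limit: bigint): [bigint, bigint] {
  const nextStart = end + 1n;
  const nextEnd = nextStart + BigInt(l1BlocksPerBatch);
  return nextEnd > limit ? [nextStart, limit] : [nextStart, nextEnd];
}

console.log(nextRange(1000n, 10_000n)); // [ 1001n, 1301n ]
```
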
@@ -309,11 +357,7 @@ export class ArchiverL1Synchronizer implements Traceable {
   }
 
   @trackSpan('Archiver.handleL1ToL2Messages')
-  private async handleL1ToL2Messages(
-    messagesSyncPoint: L1BlockId,
-    currentL1BlockNumber: bigint,
-    _currentL1BlockHash: Buffer32,
-  ): Promise<void> {
+  private async handleL1ToL2Messages(messagesSyncPoint: L1BlockId, currentL1BlockNumber: bigint): Promise<void> {
     this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
     if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
       return;
@@ -370,11 +414,8 @@ export class ArchiverL1Synchronizer implements Traceable {
 
     do {
       [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
-      this.log.trace(`Retrieving L1 to L2 messages
+      this.log.trace(`Retrieving L1 to L2 messages in L1 blocks ${searchStartBlock}-${searchEndBlock}`);
       const messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock);
-      this.log.verbose(
-        `Retrieved ${messages.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
-      );
       const timer = new Timer();
       await this.store.addL1ToL2Messages(messages);
       const perMsg = timer.ms() / messages.length;
@@ -406,7 +447,7 @@ export class ArchiverL1Synchronizer implements Traceable {
   private async retrieveL1ToL2Message(leaf: Fr): Promise<InboxMessage | undefined> {
     const currentL1BlockNumber = await this.publicClient.getBlockNumber();
     let searchStartBlock: bigint = 0n;
-    let searchEndBlock: bigint = this.
+    let searchEndBlock: bigint = this.l1Constants.l1StartBlock - 1n;
 
     do {
       [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
@@ -455,7 +496,7 @@ export class ArchiverL1Synchronizer implements Traceable {
       // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
       // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
       // after the last common message.
-      const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.
+      const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1Constants.l1StartBlock;
       const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
       messagesSyncPoint = { l1BlockNumber: syncPointL1BlockNumber, l1BlockHash: syncPointL1BlockHash };
       await this.store.setMessageSynchedL1Block(messagesSyncPoint);
@@ -545,7 +586,7 @@ export class ArchiverL1Synchronizer implements Traceable {
       await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
       this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
       const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
-      const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.
+      const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1Constants);
       const lastBlockNumberInCheckpoint =
         localCheckpointForDestinationProvenCheckpointNumber.startBlock +
         localCheckpointForDestinationProvenCheckpointNumber.numBlocks -
@@ -635,7 +676,7 @@ export class ArchiverL1Synchronizer implements Traceable {
     }
 
     const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
-    await this.updater.
+    await this.updater.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
 
     this.log.warn(
       `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
@@ -695,7 +736,7 @@ export class ArchiverL1Synchronizer implements Traceable {
     for (const published of publishedCheckpoints) {
       const validationResult = this.config.skipValidateCheckpointAttestations
         ? { valid: true as const }
-        : await validateCheckpointAttestations(published, this.epochCache, this.
+        : await validateCheckpointAttestations(published, this.epochCache, this.l1Constants, this.log);
 
       // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
       // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
@@ -764,22 +805,42 @@ export class ArchiverL1Synchronizer implements Traceable {
     try {
       const updatedValidationResult =
         rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
-      const [processDuration] = await elapsed(() =>
-        execInSpan(this.tracer, 'Archiver.
-          this.updater.
+      const [processDuration, result] = await elapsed(() =>
+        execInSpan(this.tracer, 'Archiver.setCheckpointData', () =>
+          this.updater.setNewCheckpointData(validCheckpoints, updatedValidationResult),
         ),
       );
       this.instrumentation.processNewBlocks(
         processDuration / validCheckpoints.length,
         validCheckpoints.flatMap(c => c.checkpoint.blocks),
       );
+
+      // If blocks were pruned due to conflict with L1 checkpoints, emit event
+      if (result.prunedBlocks && result.prunedBlocks.length > 0) {
+        const prunedCheckpointNumber = result.prunedBlocks[0].checkpointNumber;
+        const prunedSlotNumber = result.prunedBlocks[0].header.globalVariables.slotNumber;
+
+        this.log.warn(
+          `Pruned ${result.prunedBlocks.length} mismatching blocks for checkpoint ${prunedCheckpointNumber}`,
+          { prunedBlocks: result.prunedBlocks.map(b => b.toBlockInfo()), prunedSlotNumber, prunedCheckpointNumber },
+        );
+
+        // Emit event for listening services to react to the prune.
+        // Note: slotNumber comes from the first pruned block. If pruned blocks theoretically spanned multiple slots,
+        // only one slot number would be reported (though in practice all blocks in a checkpoint span a single slot).
+        this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
+          type: L2BlockSourceEvents.L2PruneUncheckpointed,
+          slotNumber: prunedSlotNumber,
+          blocks: result.prunedBlocks,
+        });
+      }
     } catch (err) {
       if (err instanceof InitialCheckpointNumberNotSequentialError) {
         const { previousCheckpointNumber, newCheckpointNumber } = err;
         const previousCheckpoint = previousCheckpointNumber
           ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber))
           : undefined;
-        const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.
+        const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1Constants.l1StartBlock;
         await this.store.setCheckpointSynchedL1BlockNumber(updatedL1SyncPoint);
         this.log.warn(
           `Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`,