@aztec/archiver 4.0.0-nightly.20260116 → 4.0.0-nightly.20260118
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -2
- package/dest/archiver.d.ts +3 -2
- package/dest/archiver.d.ts.map +1 -1
- package/dest/archiver.js +7 -6
- package/dest/modules/data_source_base.d.ts +2 -1
- package/dest/modules/data_source_base.d.ts.map +1 -1
- package/dest/modules/data_source_base.js +3 -0
- package/dest/modules/data_store_updater.d.ts +29 -6
- package/dest/modules/data_store_updater.d.ts.map +1 -1
- package/dest/modules/data_store_updater.js +117 -29
- package/dest/modules/l1_synchronizer.d.ts +5 -3
- package/dest/modules/l1_synchronizer.d.ts.map +1 -1
- package/dest/modules/l1_synchronizer.js +98 -52
- package/dest/store/block_store.d.ts +16 -2
- package/dest/store/block_store.d.ts.map +1 -1
- package/dest/store/block_store.js +62 -8
- package/dest/store/kv_archiver_store.d.ts +14 -2
- package/dest/store/kv_archiver_store.d.ts.map +1 -1
- package/dest/store/kv_archiver_store.js +14 -0
- package/dest/store/log_store.d.ts +1 -1
- package/dest/store/log_store.d.ts.map +1 -1
- package/dest/store/log_store.js +69 -48
- package/dest/test/fake_l1_state.d.ts +18 -1
- package/dest/test/fake_l1_state.d.ts.map +1 -1
- package/dest/test/fake_l1_state.js +36 -17
- package/dest/test/mock_l2_block_source.d.ts +2 -1
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +4 -0
- package/package.json +13 -13
- package/src/archiver.ts +8 -6
- package/src/modules/data_source_base.ts +4 -0
- package/src/modules/data_store_updater.ts +143 -42
- package/src/modules/l1_synchronizer.ts +113 -61
- package/src/store/block_store.ts +79 -10
- package/src/store/kv_archiver_store.ts +19 -1
- package/src/store/log_store.ts +112 -76
- package/src/test/fake_l1_state.ts +62 -24
- package/src/test/mock_l2_block_source.ts +5 -0
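The core behavioral change in this release is the block/checkpoint reconciliation added to `data_store_updater.ts` (first set of hunks below): `setNewCheckpointData` now prunes locally proposed blocks whose archive roots conflict with the incoming L1 checkpoints, and skips re-inserting logs and contract data for blocks that were already added via `addBlocks()`. A minimal, self-contained sketch of that reconciliation rule follows; the simplified `Block` shape, string archive roots, and the `reconcile` helper are illustrative stand-ins, not the package's API.

```ts
// Minimal sketch of the block-vs-checkpoint reconciliation rule introduced in this release.
// The `Block` shape and `reconcile` helper are simplified stand-ins, not the package API.
type Block = { number: number; archiveRoot: string };

type ReconcileResult = {
  pruneAfter: number | undefined; // prune all local blocks strictly after this number
  lastAlreadyInserted: number | undefined; // highest local block that matches the checkpoint
};

function reconcile(localBlocks: Block[], checkpointBlocks: Block[]): ReconcileResult {
  let lastAlreadyInserted: number | undefined;
  for (const cp of checkpointBlocks) {
    const local = localBlocks.find(b => b.number === cp.number);
    if (!local) {
      continue; // nothing stored locally at this height
    } else if (local.archiveRoot === cp.archiveRoot) {
      lastAlreadyInserted = cp.number; // same block was already inserted via addBlocks()
    } else {
      // Archive roots diverge: drop the conflicting local block and everything after it.
      return { pruneAfter: cp.number - 1, lastAlreadyInserted };
    }
  }
  // Local chain extends past the checkpoint: trim it back to the checkpointed tip.
  const lastCp = checkpointBlocks.at(-1)?.number;
  const lastLocal = localBlocks.at(-1)?.number;
  if (lastCp !== undefined && lastLocal !== undefined && lastLocal > lastCp) {
    return { pruneAfter: lastCp, lastAlreadyInserted };
  }
  return { pruneAfter: undefined, lastAlreadyInserted };
}

// Example: local block 3 diverges from the checkpoint, so blocks 3 and 4 are pruned.
const local = [
  { number: 2, archiveRoot: '0xaa' },
  { number: 3, archiveRoot: '0xbb' },
  { number: 4, archiveRoot: '0xcc' },
];
const checkpoint = [
  { number: 2, archiveRoot: '0xaa' },
  { number: 3, archiveRoot: '0xdd' },
];
console.log(reconcile(local, checkpoint)); // { pruneAfter: 2, lastAlreadyInserted: 2 }
```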
--- package/src/modules/data_store_updater.ts
+++ package/src/modules/data_store_updater.ts
@@ -1,4 +1,4 @@
-import
+import { BlockNumber, type CheckpointNumber } from '@aztec/foundation/branded-types';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import {
@@ -32,6 +32,14 @@ enum Operation {
   Delete,
 }
 
+/** Result of adding checkpoints with information about any pruned blocks. */
+type ReconcileCheckpointsResult = {
+  /** Blocks that were pruned due to conflict with L1 checkpoints. */
+  prunedBlocks: L2BlockNew[] | undefined;
+  /** Last block number that was already inserted locally, or undefined if none. */
+  lastAlreadyInsertedBlockNumber: BlockNumber | undefined;
+};
+
 /** Archiver helper module to handle updates to the data store. */
 export class ArchiverDataStoreUpdater {
   private readonly log = createLogger('archiver:store_updater');
@@ -47,10 +55,7 @@ export class ArchiverDataStoreUpdater {
    * @param pendingChainValidationStatus - Optional validation status to set.
    * @returns True if the operation is successful.
    */
-  public
-    blocks: L2BlockNew[],
-    pendingChainValidationStatus?: ValidateCheckpointResult,
-  ): Promise<boolean> {
+  public addBlocks(blocks: L2BlockNew[], pendingChainValidationStatus?: ValidateCheckpointResult): Promise<boolean> {
     return this.store.transactionAsync(async () => {
       await this.store.addBlocks(blocks);
 
@@ -68,32 +73,136 @@ export class ArchiverDataStoreUpdater {
   }
 
   /**
+   * Reconciles local blocks with incoming checkpoints from L1.
    * Adds checkpoints to the store with contract class/instance extraction from logs.
+   * Prunes any local blocks that conflict with checkpoint data (by comparing archive roots).
    * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events,
    * and individually broadcasted functions from the checkpoint block logs.
    *
    * @param checkpoints - The published checkpoints to add.
    * @param pendingChainValidationStatus - Optional validation status to set.
-   * @returns
+   * @returns Result with information about any pruned blocks.
    */
-  public
+  public setNewCheckpointData(
     checkpoints: PublishedCheckpoint[],
     pendingChainValidationStatus?: ValidateCheckpointResult,
-  ): Promise<
+  ): Promise<ReconcileCheckpointsResult> {
     return this.store.transactionAsync(async () => {
+      // Before adding checkpoints, check for conflicts with local blocks if any
+      const { prunedBlocks, lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints);
+
       await this.store.addCheckpoints(checkpoints);
-      const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks);
 
-
+      // Filter out blocks that were already inserted via addBlocks() to avoid duplicating logs/contract data
+      const newBlocks = checkpoints
+        .flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks)
+        .filter(b => lastAlreadyInsertedBlockNumber === undefined || b.number > lastAlreadyInsertedBlockNumber);
+
+      await Promise.all([
         // Update the pending chain validation status if provided
         pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
         // Add any logs emitted during the retrieved blocks
-        this.store.addLogs(
+        this.store.addLogs(newBlocks),
         // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
-        ...
+        ...newBlocks.map(block => this.addBlockDataToDB(block)),
       ]);
 
-      return
+      return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+    });
+  }
+
+  /**
+   * Checks for local proposed blocks that do not match the ones to be checkpointed and prunes them.
+   * This method handles multiple checkpoints but returns after pruning the first conflict found.
+   * This is correct because pruning from the first conflict point removes all subsequent blocks,
+   * and when checkpoints are added afterward, they include all the correct blocks.
+   */
+  private async pruneMismatchingLocalBlocks(checkpoints: PublishedCheckpoint[]): Promise<ReconcileCheckpointsResult> {
+    const [lastCheckpointedBlockNumber, lastBlockNumber] = await Promise.all([
+      this.store.getCheckpointedL2BlockNumber(),
+      this.store.getLatestBlockNumber(),
+    ]);
+
+    // Exit early if there are no local uncheckpointed blocks
+    if (lastBlockNumber === lastCheckpointedBlockNumber) {
+      return { prunedBlocks: undefined, lastAlreadyInsertedBlockNumber: undefined };
+    }
+
+    // Get all uncheckpointed local blocks
+    const uncheckpointedLocalBlocks = await this.store.getBlocks(
+      BlockNumber.add(lastCheckpointedBlockNumber, 1),
+      lastBlockNumber - lastCheckpointedBlockNumber,
+    );
+
+    let lastAlreadyInsertedBlockNumber: BlockNumber | undefined;
+
+    for (const publishedCheckpoint of checkpoints) {
+      const checkpointBlocks = publishedCheckpoint.checkpoint.blocks;
+      const slot = publishedCheckpoint.checkpoint.slot;
+      const localBlocksInSlot = uncheckpointedLocalBlocks.filter(b => b.slot === slot);
+
+      if (checkpointBlocks.length === 0) {
+        this.log.warn(`Checkpoint ${publishedCheckpoint.checkpoint.number} for slot ${slot} has no blocks`);
+        continue;
+      }
+
+      // Find the first checkpoint block that conflicts with an existing local block and prune local afterwards
+      for (const checkpointBlock of checkpointBlocks) {
+        const blockNumber = checkpointBlock.number;
+        const existingBlock = localBlocksInSlot.find(b => b.number === blockNumber);
+        const blockInfos = {
+          existingBlock: existingBlock?.toBlockInfo(),
+          checkpointBlock: checkpointBlock.toBlockInfo(),
+        };
+
+        if (!existingBlock) {
+          this.log.verbose(`No local block found for checkpointed block number ${blockNumber}`, blockInfos);
+        } else if (existingBlock.archive.root.equals(checkpointBlock.archive.root)) {
+          this.log.verbose(`Block number ${blockNumber} already inserted and matches checkpoint`, blockInfos);
+          lastAlreadyInsertedBlockNumber = blockNumber;
+        } else {
+          this.log.warn(`Conflict detected at block ${blockNumber} between checkpointed and local block`, blockInfos);
+          const prunedBlocks = await this.removeBlocksAfter(BlockNumber(blockNumber - 1));
+          return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+        }
+      }
+
+      // If local has more blocks than the checkpoint (e.g., local has [2,3,4] but checkpoint has [2,3]),
+      // we need to prune the extra local blocks so they match what was checkpointed
+      const lastCheckpointBlockNumber = checkpointBlocks.at(-1)!.number;
+      const lastLocalBlockNumber = localBlocksInSlot.at(-1)?.number;
+
+      if (lastLocalBlockNumber !== undefined && lastLocalBlockNumber > lastCheckpointBlockNumber) {
+        this.log.warn(
+          `Local chain for slot ${slot} ends at block ${lastLocalBlockNumber} but checkpoint ends at ${lastCheckpointBlockNumber}. Pruning blocks after block ${lastCheckpointBlockNumber}.`,
+        );
+        const prunedBlocks = await this.removeBlocksAfter(lastCheckpointBlockNumber);
+        return { prunedBlocks, lastAlreadyInsertedBlockNumber };
+      }
+    }
+
+    return { prunedBlocks: undefined, lastAlreadyInsertedBlockNumber };
+  }
+
+  /**
+   * Removes all blocks strictly after the specified block number and cleans up associated contract data.
+   * This handles removal of provisionally added blocks along with their contract classes/instances.
+   *
+   * @param blockNumber - Remove all blocks with number greater than this.
+   * @returns The removed blocks.
+   */
+  public removeBlocksAfter(blockNumber: BlockNumber): Promise<L2BlockNew[]> {
+    return this.store.transactionAsync(async () => {
+      // First get the blocks to be removed so we can clean up contract data
+      const removedBlocks = await this.store.removeBlocksAfter(blockNumber);
+
+      // Clean up contract data and logs for the removed blocks
+      await Promise.all([
+        this.store.deleteLogs(removedBlocks),
+        ...removedBlocks.map(block => this.removeBlockDataFromDB(block)),
+      ]);
+
+      return removedBlocks;
     });
   }
 
@@ -106,10 +215,7 @@ export class ArchiverDataStoreUpdater {
    * @param checkpointsToUnwind - The number of checkpoints to unwind.
    * @returns True if the operation is successful.
    */
-  public async
-    from: CheckpointNumber,
-    checkpointsToUnwind: number,
-  ): Promise<boolean> {
+  public async unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
     if (checkpointsToUnwind <= 0) {
       throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
     }
@@ -132,22 +238,8 @@ export class ArchiverDataStoreUpdater {
     const opResults = await Promise.all([
       // Prune rolls back to the last proven block, which is by definition valid
       this.store.setPendingChainValidationStatus({ valid: true }),
-      //
-      ...blocks.map(
-        const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
-        // ContractInstancePublished event logs are broadcast in privateLogs.
-        const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
-        const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
-
-        return (
-          await Promise.all([
-            this.updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete),
-            this.updateDeployedContractInstances(privateLogs, block.number, Operation.Delete),
-            this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete),
-          ])
-        ).every(Boolean);
-      }),
-
+      // Remove contract data for all blocks being unwound
+      ...blocks.map(block => this.removeBlockDataFromDB(block)),
       this.store.deleteLogs(blocks),
       this.store.unwindCheckpoints(from, checkpointsToUnwind),
     ]);
@@ -155,21 +247,30 @@ export class ArchiverDataStoreUpdater {
     return opResults.every(Boolean);
   }
 
-  /**
-
-
-
+  /** Extracts and stores contract data from a single block. */
+  private addBlockDataToDB(block: L2BlockNew): Promise<boolean> {
+    return this.editContractBlockData(block, Operation.Store);
+  }
+
+  /** Removes contract data associated with a block. */
+  private removeBlockDataFromDB(block: L2BlockNew): Promise<boolean> {
+    return this.editContractBlockData(block, Operation.Delete);
+  }
+
+  /** Adds or remove contract data associated with a block. */
+  private async editContractBlockData(block: L2BlockNew, operation: Operation): Promise<boolean> {
     const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
-    // ContractInstancePublished event logs are broadcast in privateLogs.
     const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
     const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
 
     return (
       await Promise.all([
-        this.updatePublishedContractClasses(contractClassLogs, block.number,
-        this.updateDeployedContractInstances(privateLogs, block.number,
-        this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp,
-
+        this.updatePublishedContractClasses(contractClassLogs, block.number, operation),
+        this.updateDeployedContractInstances(privateLogs, block.number, operation),
+        this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, operation),
+        operation === Operation.Store
+          ? this.storeBroadcastedIndividualFunctions(contractClassLogs, block.number)
+          : Promise.resolve(true),
      ])
    ).every(Boolean);
  }
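The synchronizer-side counterpart (next file's hunks) adds a `pruneUncheckpointedBlocks` pass after `handleCheckpoints`: if the first locally proposed but uncheckpointed block belongs to a slot that will already be over at the next L1 block, those provisional blocks are pruned back to the checkpointed tip and an `L2PruneUncheckpointed` event is emitted. The sketch below is a self-contained simplification of that decision only; the argument names and the genesis-based slot arithmetic are illustrative assumptions, while the package itself derives the slot via `getSlotAtTimestamp` and the rollup constants.

```ts
// Sketch of the prune-uncheckpointed-blocks decision added in l1_synchronizer.ts below.
// Simplified stand-in: the real code reads the checkpointed/proposed tips from the store
// and computes the slot with getSlotAtTimestamp(nextL1Timestamp, l1Constants).
type PruneDecision = { prune: boolean; reason?: string };

function shouldPruneUncheckpointed(args: {
  lastCheckpointedBlock: number;
  lastProposedBlock: number;
  firstUncheckpointedSlot: bigint | undefined;
  currentL1Timestamp: bigint;
  genesisTimestamp: bigint; // assumption: slots counted from this timestamp
  l2SlotDuration: bigint; // seconds per L2 slot
  ethereumSlotDuration: bigint; // seconds per L1 slot
}): PruneDecision {
  const { lastCheckpointedBlock, lastProposedBlock, firstUncheckpointedSlot } = args;
  // Nothing proposed beyond the checkpointed tip, or no local header found: nothing to prune.
  if (lastProposedBlock === lastCheckpointedBlock || firstUncheckpointedSlot === undefined) {
    return { prune: false };
  }
  // Slot that will be current at the next L1 block; blocks in strictly earlier slots
  // should have been checkpointed by now.
  const nextL1Timestamp = args.currentL1Timestamp + args.ethereumSlotDuration;
  const slotAtNextL1Block = (nextL1Timestamp - args.genesisTimestamp) / args.l2SlotDuration;
  if (firstUncheckpointedSlot < slotAtNextL1Block) {
    return { prune: true, reason: `slot ${firstUncheckpointedSlot} ended without a checkpoint` };
  }
  return { prune: false };
}

// Example: the first uncheckpointed block sits in slot 10, but the next L1 block falls in
// slot 12, so blocks 21-22 would be pruned back to the checkpointed tip at block 20.
console.log(
  shouldPruneUncheckpointed({
    lastCheckpointedBlock: 20,
    lastProposedBlock: 22,
    firstUncheckpointedSlot: 10n,
    currentL1Timestamp: 440n,
    genesisTimestamp: 0n,
    l2SlotDuration: 36n,
    ethereumSlotDuration: 12n,
  }),
); // { prune: true, reason: 'slot 10 ended without a checkpoint' }
```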
--- package/src/modules/l1_synchronizer.ts
+++ package/src/modules/l1_synchronizer.ts
@@ -16,7 +16,7 @@ import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
 import { isDefined } from '@aztec/foundation/types';
 import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResult } from '@aztec/stdlib/block';
 import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
-import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
+import { type L1RollupConstants, getEpochAtSlot, getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers';
 import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
 import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client';
 
@@ -49,6 +49,7 @@ type RollupStatus = {
  */
 export class ArchiverL1Synchronizer implements Traceable {
   private l1BlockNumber: bigint | undefined;
+  private l1BlockHash: Buffer32 | undefined;
   private l1Timestamp: bigint | undefined;
 
   private readonly updater: ArchiverDataStoreUpdater;
@@ -73,7 +74,7 @@ export class ArchiverL1Synchronizer implements Traceable {
     private readonly epochCache: EpochCache,
     private readonly dateProvider: DateProvider,
     private readonly instrumentation: ArchiverInstrumentation,
-    private readonly
+    private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
     private readonly events: ArchiverEmitter,
     tracer: Tracer,
     private readonly log: Logger = createLogger('archiver:l1-sync'),
@@ -118,27 +119,36 @@ export class ArchiverL1Synchronizer implements Traceable {
 
   @trackSpan('Archiver.syncFromL1')
   public async syncFromL1(initialSyncComplete: boolean): Promise<void> {
-    /**
-     * We keep track of three "pointers" to L1 blocks:
-     * 1. the last L1 block that published an L2 block
-     * 2. the last L1 block that added L1 to L2 messages
-     * 3. the last L1 block that cancelled L1 to L2 messages
-     *
-     * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
-     * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
-     * The archiver will stay back, until there's data on L1 that will move the pointers forward.
-     */
-    const { l1StartBlock, l1StartBlockHash } = this.l1constants;
-    const {
-      blocksSynchedTo = l1StartBlock,
-      messagesSynchedTo = { l1BlockNumber: l1StartBlock, l1BlockHash: l1StartBlockHash },
-    } = await this.store.getSynchPoint();
-
     const currentL1Block = await this.publicClient.getBlock({ includeTransactions: false });
     const currentL1BlockNumber = currentL1Block.number;
     const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
+    const currentL1Timestamp = currentL1Block.timestamp;
+
+    if (this.l1BlockHash && currentL1BlockHash.equals(this.l1BlockHash)) {
+      this.log.trace(`No new L1 blocks since last sync at L1 block ${this.l1BlockNumber}`);
+      return;
+    }
+
+    // Warn if the latest L1 block timestamp is too old
+    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+    const now = this.dateProvider.nowInSeconds();
+    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
+      this.log.warn(
+        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
+        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
+      );
+    }
 
-
+    // Load sync point for blocks and messages defaulting to start block
+    const {
+      blocksSynchedTo = this.l1Constants.l1StartBlock,
+      messagesSynchedTo = {
+        l1BlockNumber: this.l1Constants.l1StartBlock,
+        l1BlockHash: this.l1Constants.l1StartBlockHash,
+      },
+    } = await this.store.getSynchPoint();
+
+    this.log.debug(`Starting new archiver sync iteration`, {
       blocksSynchedTo,
       messagesSynchedTo,
       currentL1BlockNumber,
@@ -165,29 +175,17 @@
      */
 
     // ********** Events that are processed per L1 block **********
-    await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber
-
-    // Get L1 timestamp for the current block
-    const currentL1Timestamp =
-      !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber
-        ? (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp
-        : this.l1Timestamp;
-
-    // Warn if the latest L1 block timestamp is too old
-    const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
-    const now = this.dateProvider.nowInSeconds();
-    if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
-      this.log.warn(
-        `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
-        { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
-      );
-    }
+    await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber);
 
     // ********** Events that are processed per checkpoint **********
     if (currentL1BlockNumber > blocksSynchedTo) {
       // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
       // pending chain validation status, proven checkpoint number, and synched L1 block number.
       const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete);
+
+      // Then we try pruning uncheckpointed blocks if a new slot was mined without checkpoints
+      await this.pruneUncheckpointedBlocks(currentL1Timestamp);
+
       // Then we prune the current epoch if it'd reorg on next submission.
       // Note that we don't do this before retrieving checkpoints because we may need to retrieve
       // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
@@ -222,18 +220,59 @@
     // but the corresponding blocks have not been processed (see #12631).
     this.l1Timestamp = currentL1Timestamp;
     this.l1BlockNumber = currentL1BlockNumber;
+    this.l1BlockHash = currentL1BlockHash;
 
     const l1BlockNumberAtEnd = await this.publicClient.getBlockNumber();
-    this.log.
+    this.log.debug(`Archiver sync iteration complete`, {
       l1BlockNumberAtStart: currentL1BlockNumber,
       l1TimestampAtStart: currentL1Timestamp,
       l1BlockNumberAtEnd,
     });
   }
 
+  /** Prune all proposed local blocks that should have been checkpointed by now. */
+  private async pruneUncheckpointedBlocks(currentL1Timestamp: bigint) {
+    const [lastCheckpointedBlockNumber, lastProposedBlockNumber] = await Promise.all([
+      this.store.getCheckpointedL2BlockNumber(),
+      this.store.getLatestBlockNumber(),
+    ]);
+
+    // If there are no uncheckpointed blocks, we got nothing to do
+    if (lastProposedBlockNumber === lastCheckpointedBlockNumber) {
+      this.log.trace(`No uncheckpointed blocks to prune.`);
+      return;
+    }
+
+    // What's the slot of the first uncheckpointed block?
+    const firstUncheckpointedBlockNumber = BlockNumber(lastCheckpointedBlockNumber + 1);
+    const [firstUncheckpointedBlockHeader] = await this.store.getBlockHeaders(firstUncheckpointedBlockNumber, 1);
+    const firstUncheckpointedBlockSlot = firstUncheckpointedBlockHeader?.getSlot();
+
+    // What's the slot at the next L1 block? All blocks for slots strictly before this one should've been checkpointed by now.
+    const nextL1BlockTimestamp = currentL1Timestamp + BigInt(this.l1Constants.ethereumSlotDuration);
+    const slotAtNextL1Block = getSlotAtTimestamp(nextL1BlockTimestamp, this.l1Constants);
+
+    // Prune provisional blocks from slots that have ended without being checkpointed
+    if (firstUncheckpointedBlockSlot !== undefined && firstUncheckpointedBlockSlot < slotAtNextL1Block) {
+      this.log.warn(
+        `Pruning blocks after block ${lastCheckpointedBlockNumber} due to slot ${firstUncheckpointedBlockSlot} not being checkpointed`,
+        { firstUncheckpointedBlockHeader: firstUncheckpointedBlockHeader.toInspect(), slotAtNextL1Block },
+      );
+      const prunedBlocks = await this.updater.removeBlocksAfter(lastCheckpointedBlockNumber);
+
+      if (prunedBlocks.length > 0) {
+        this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
+          type: L2BlockSourceEvents.L2PruneUncheckpointed,
+          slotNumber: firstUncheckpointedBlockSlot,
+          blocks: prunedBlocks,
+        });
+      }
+    }
+  }
+
   /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
   private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint): Promise<boolean> {
-    const time = (currentL1Timestamp ?? 0n) + BigInt(this.
+    const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1Constants.ethereumSlotDuration);
     const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber });
     if (result) {
       this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
@@ -266,7 +305,7 @@
     }
 
     const pruneFromSlotNumber = header.slotNumber;
-    const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.
+    const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1Constants);
 
     const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
 
@@ -283,8 +322,8 @@
     const newBlocks = blockPromises.filter(isDefined).flat();
 
     // Emit an event for listening services to react to the chain prune
-    this.events.emit(L2BlockSourceEvents.
-      type: L2BlockSourceEvents.
+    this.events.emit(L2BlockSourceEvents.L2PruneUnproven, {
+      type: L2BlockSourceEvents.L2PruneUnproven,
       epochNumber: pruneFromEpochNumber,
       blocks: newBlocks,
     });
@@ -292,7 +331,7 @@
     this.log.debug(
       `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
     );
-    await this.updater.
+    await this.updater.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
     this.log.warn(
       `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
         `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
@@ -308,7 +347,7 @@
   }
 
   private nextRange(end: bigint, limit: bigint): [bigint, bigint] {
-    const batchSize = (this.config.batchSize * this.
+    const batchSize = (this.config.batchSize * this.l1Constants.slotDuration) / this.l1Constants.ethereumSlotDuration;
     const nextStart = end + 1n;
     const nextEnd = nextStart + BigInt(batchSize);
     if (nextEnd > limit) {
@@ -318,11 +357,7 @@
   }
 
   @trackSpan('Archiver.handleL1ToL2Messages')
-  private async handleL1ToL2Messages(
-    messagesSyncPoint: L1BlockId,
-    currentL1BlockNumber: bigint,
-    _currentL1BlockHash: Buffer32,
-  ): Promise<void> {
+  private async handleL1ToL2Messages(messagesSyncPoint: L1BlockId, currentL1BlockNumber: bigint): Promise<void> {
     this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
     if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
       return;
@@ -379,11 +414,8 @@
 
     do {
       [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
-      this.log.trace(`Retrieving L1 to L2 messages
+      this.log.trace(`Retrieving L1 to L2 messages in L1 blocks ${searchStartBlock}-${searchEndBlock}`);
       const messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock);
-      this.log.verbose(
-        `Retrieved ${messages.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
-      );
       const timer = new Timer();
       await this.store.addL1ToL2Messages(messages);
       const perMsg = timer.ms() / messages.length;
@@ -415,7 +447,7 @@
   private async retrieveL1ToL2Message(leaf: Fr): Promise<InboxMessage | undefined> {
     const currentL1BlockNumber = await this.publicClient.getBlockNumber();
     let searchStartBlock: bigint = 0n;
-    let searchEndBlock: bigint = this.
+    let searchEndBlock: bigint = this.l1Constants.l1StartBlock - 1n;
 
     do {
       [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
@@ -464,7 +496,7 @@
       // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
       // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
       // after the last common message.
-      const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.
+      const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1Constants.l1StartBlock;
       const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
       messagesSyncPoint = { l1BlockNumber: syncPointL1BlockNumber, l1BlockHash: syncPointL1BlockHash };
       await this.store.setMessageSynchedL1Block(messagesSyncPoint);
@@ -554,7 +586,7 @@
     await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
     this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
     const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
-    const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.
+    const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1Constants);
     const lastBlockNumberInCheckpoint =
       localCheckpointForDestinationProvenCheckpointNumber.startBlock +
       localCheckpointForDestinationProvenCheckpointNumber.numBlocks -
@@ -644,7 +676,7 @@
     }
 
     const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
-    await this.updater.
+    await this.updater.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
 
     this.log.warn(
       `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
@@ -704,7 +736,7 @@
     for (const published of publishedCheckpoints) {
       const validationResult = this.config.skipValidateCheckpointAttestations
         ? { valid: true as const }
-        : await validateCheckpointAttestations(published, this.epochCache, this.
+        : await validateCheckpointAttestations(published, this.epochCache, this.l1Constants, this.log);
 
       // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
       // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
@@ -773,22 +805,42 @@
     try {
       const updatedValidationResult =
         rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
-      const [processDuration] = await elapsed(() =>
-        execInSpan(this.tracer, 'Archiver.
-          this.updater.
+      const [processDuration, result] = await elapsed(() =>
+        execInSpan(this.tracer, 'Archiver.setCheckpointData', () =>
+          this.updater.setNewCheckpointData(validCheckpoints, updatedValidationResult),
        ),
      );
      this.instrumentation.processNewBlocks(
        processDuration / validCheckpoints.length,
        validCheckpoints.flatMap(c => c.checkpoint.blocks),
      );
+
+      // If blocks were pruned due to conflict with L1 checkpoints, emit event
+      if (result.prunedBlocks && result.prunedBlocks.length > 0) {
+        const prunedCheckpointNumber = result.prunedBlocks[0].checkpointNumber;
+        const prunedSlotNumber = result.prunedBlocks[0].header.globalVariables.slotNumber;
+
+        this.log.warn(
+          `Pruned ${result.prunedBlocks.length} mismatching blocks for checkpoint ${prunedCheckpointNumber}`,
+          { prunedBlocks: result.prunedBlocks.map(b => b.toBlockInfo()), prunedSlotNumber, prunedCheckpointNumber },
+        );
+
+        // Emit event for listening services to react to the prune.
+        // Note: slotNumber comes from the first pruned block. If pruned blocks theoretically spanned multiple slots,
+        // only one slot number would be reported (though in practice all blocks in a checkpoint span a single slot).
+        this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
+          type: L2BlockSourceEvents.L2PruneUncheckpointed,
+          slotNumber: prunedSlotNumber,
+          blocks: result.prunedBlocks,
+        });
+      }
     } catch (err) {
       if (err instanceof InitialCheckpointNumberNotSequentialError) {
        const { previousCheckpointNumber, newCheckpointNumber } = err;
        const previousCheckpoint = previousCheckpointNumber
          ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber))
          : undefined;
-        const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.
+        const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1Constants.l1StartBlock;
        await this.store.setCheckpointSynchedL1BlockNumber(updatedL1SyncPoint);
        this.log.warn(
          `Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`,