@aztec/archiver 0.55.1 → 0.56.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/dest/archiver/archiver.d.ts +4 -5
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +82 -110
  4. package/dest/archiver/archiver_store.d.ts +11 -17
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +1 -25
  8. package/dest/archiver/data_retrieval.d.ts +33 -5
  9. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  10. package/dest/archiver/data_retrieval.js +124 -15
  11. package/dest/archiver/kv_archiver_store/block_store.d.ts +2 -2
  12. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  13. package/dest/archiver/kv_archiver_store/block_store.js +16 -10
  14. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +3 -15
  15. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  16. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +12 -25
  17. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -0
  18. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/message_store.js +11 -8
  20. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +3 -20
  21. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  22. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +9 -28
  23. package/dest/index.d.ts +0 -1
  24. package/dest/index.d.ts.map +1 -1
  25. package/dest/index.js +1 -2
  26. package/package.json +10 -10
  27. package/src/archiver/archiver.ts +131 -156
  28. package/src/archiver/archiver_store.ts +12 -18
  29. package/src/archiver/archiver_store_test_suite.ts +1 -28
  30. package/src/archiver/data_retrieval.ts +189 -29
  31. package/src/archiver/kv_archiver_store/block_store.ts +17 -10
  32. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +11 -25
  33. package/src/archiver/kv_archiver_store/message_store.ts +9 -5
  34. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +10 -32
  35. package/src/index.ts +0 -2
  36. package/dest/archiver/eth_log_handlers.d.ts +0 -59
  37. package/dest/archiver/eth_log_handlers.d.ts.map +0 -1
  38. package/dest/archiver/eth_log_handlers.js +0 -155
  39. package/dest/archiver/kv_archiver_store/block_body_store.d.ts +0 -34
  40. package/dest/archiver/kv_archiver_store/block_body_store.d.ts.map +0 -1
  41. package/dest/archiver/kv_archiver_store/block_body_store.js +0 -65
  42. package/src/archiver/eth_log_handlers.ts +0 -213
  43. package/src/archiver/kv_archiver_store/block_body_store.ts +0 -74
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/archiver",
3
- "version": "0.55.1",
3
+ "version": "0.56.0",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -61,15 +61,15 @@
61
61
  ]
62
62
  },
63
63
  "dependencies": {
64
- "@aztec/circuit-types": "0.55.1",
65
- "@aztec/circuits.js": "0.55.1",
66
- "@aztec/ethereum": "0.55.1",
67
- "@aztec/foundation": "0.55.1",
68
- "@aztec/kv-store": "0.55.1",
69
- "@aztec/l1-artifacts": "0.55.1",
70
- "@aztec/protocol-contracts": "0.55.1",
71
- "@aztec/telemetry-client": "0.55.1",
72
- "@aztec/types": "0.55.1",
64
+ "@aztec/circuit-types": "0.56.0",
65
+ "@aztec/circuits.js": "0.56.0",
66
+ "@aztec/ethereum": "0.56.0",
67
+ "@aztec/foundation": "0.56.0",
68
+ "@aztec/kv-store": "0.56.0",
69
+ "@aztec/l1-artifacts": "0.56.0",
70
+ "@aztec/protocol-contracts": "0.56.0",
71
+ "@aztec/telemetry-client": "0.56.0",
72
+ "@aztec/types": "0.56.0",
73
73
  "debug": "^4.3.4",
74
74
  "lodash.groupby": "^4.6.0",
75
75
  "lodash.omit": "^4.5.0",
@@ -24,13 +24,12 @@ import {
24
24
  import { createEthereumChain } from '@aztec/ethereum';
25
25
  import { type ContractArtifact } from '@aztec/foundation/abi';
26
26
  import { type AztecAddress } from '@aztec/foundation/aztec-address';
27
- import { compactArray, unique } from '@aztec/foundation/collection';
28
27
  import { type EthAddress } from '@aztec/foundation/eth-address';
29
28
  import { Fr } from '@aztec/foundation/fields';
30
29
  import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
31
30
  import { RunningPromise } from '@aztec/foundation/running-promise';
32
31
  import { Timer } from '@aztec/foundation/timer';
33
- import { RollupAbi } from '@aztec/l1-artifacts';
32
+ import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
34
33
  import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer';
35
34
  import { type TelemetryClient } from '@aztec/telemetry-client';
36
35
  import {
@@ -43,12 +42,19 @@ import {
43
42
  } from '@aztec/types/contracts';
44
43
 
45
44
  import groupBy from 'lodash.groupby';
46
- import { type Chain, type HttpTransport, type PublicClient, createPublicClient, getContract, http } from 'viem';
45
+ import {
46
+ type Chain,
47
+ type GetContractReturnType,
48
+ type HttpTransport,
49
+ type PublicClient,
50
+ createPublicClient,
51
+ getContract,
52
+ http,
53
+ } from 'viem';
47
54
 
48
55
  import { type ArchiverDataStore } from './archiver_store.js';
49
56
  import { type ArchiverConfig } from './config.js';
50
- import { retrieveBlockFromRollup, retrieveL1ToL2Messages, retrieveL2ProofVerifiedEvents } from './data_retrieval.js';
51
- import { getL1BlockTime } from './eth_log_handlers.js';
57
+ import { retrieveBlockFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js';
52
58
  import { ArchiverInstrumentation } from './instrumentation.js';
53
59
  import { type SingletonDataRetrieval } from './structs/data_retrieval.js';
54
60
 
@@ -68,6 +74,9 @@ export class Archiver implements ArchiveSource {
68
74
  */
69
75
  private runningPromise?: RunningPromise;
70
76
 
77
+ private rollup: GetContractReturnType<typeof RollupAbi, PublicClient<HttpTransport, Chain>>;
78
+ private inbox: GetContractReturnType<typeof InboxAbi, PublicClient<HttpTransport, Chain>>;
79
+
71
80
  /**
72
81
  * Creates a new instance of the Archiver.
73
82
  * @param publicClient - A client for interacting with the Ethereum node.
@@ -88,7 +97,19 @@ export class Archiver implements ArchiveSource {
88
97
  private readonly instrumentation: ArchiverInstrumentation,
89
98
  private readonly l1StartBlock: bigint = 0n,
90
99
  private readonly log: DebugLogger = createDebugLogger('aztec:archiver'),
91
- ) {}
100
+ ) {
101
+ this.rollup = getContract({
102
+ address: rollupAddress.toString(),
103
+ abi: RollupAbi,
104
+ client: publicClient,
105
+ });
106
+
107
+ this.inbox = getContract({
108
+ address: inboxAddress.toString(),
109
+ abi: InboxAbi,
110
+ client: publicClient,
111
+ });
112
+ }
92
113
 
93
114
  /**
94
115
  * Creates a new instance of the Archiver and blocks until it syncs from chain.
@@ -179,31 +200,12 @@ export class Archiver implements ArchiveSource {
179
200
  * This code does not handle reorgs.
180
201
  */
181
202
  const {
182
- blockBodiesSynchedTo = this.l1StartBlock,
183
203
  blocksSynchedTo = this.l1StartBlock,
184
204
  messagesSynchedTo = this.l1StartBlock,
185
205
  provenLogsSynchedTo = this.l1StartBlock,
186
206
  } = await this.store.getSynchPoint();
187
207
  const currentL1BlockNumber = await this.publicClient.getBlockNumber();
188
208
 
189
- if (
190
- currentL1BlockNumber <= blocksSynchedTo &&
191
- currentL1BlockNumber <= messagesSynchedTo &&
192
- currentL1BlockNumber <= blockBodiesSynchedTo &&
193
- currentL1BlockNumber <= provenLogsSynchedTo
194
- ) {
195
- // chain hasn't moved forward
196
- // or it's been rolled back
197
- this.log.debug(`Nothing to sync`, {
198
- currentL1BlockNumber,
199
- blocksSynchedTo,
200
- messagesSynchedTo,
201
- provenLogsSynchedTo,
202
- blockBodiesSynchedTo,
203
- });
204
- return;
205
- }
206
-
207
209
  // ********** Ensuring Consistency of data pulled from L1 **********
208
210
 
209
211
  /**
@@ -223,51 +225,114 @@ export class Archiver implements ArchiveSource {
223
225
  * in future but for the time being it should give us the guarantees that we need
224
226
  */
225
227
 
228
+ await this.updateLastProvenL2Block(provenLogsSynchedTo, currentL1BlockNumber);
229
+
226
230
  // ********** Events that are processed per L1 block **********
227
231
 
232
+ await this.handleL1ToL2Messages(blockUntilSynced, messagesSynchedTo, currentL1BlockNumber);
233
+
228
234
  // ********** Events that are processed per L2 block **********
235
+ await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber);
236
+ }
237
+
238
+ private async handleL1ToL2Messages(
239
+ blockUntilSynced: boolean,
240
+ messagesSynchedTo: bigint,
241
+ currentL1BlockNumber: bigint,
242
+ ) {
243
+ if (currentL1BlockNumber <= messagesSynchedTo) {
244
+ return;
245
+ }
229
246
 
230
247
  const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(
231
- this.publicClient,
232
- this.inboxAddress,
248
+ this.inbox,
233
249
  blockUntilSynced,
234
250
  messagesSynchedTo + 1n,
235
251
  currentL1BlockNumber,
236
252
  );
237
253
 
238
- if (retrievedL1ToL2Messages.retrievedData.length !== 0) {
254
+ if (retrievedL1ToL2Messages.retrievedData.length === 0) {
255
+ await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber);
239
256
  this.log.verbose(
240
- `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
241
- messagesSynchedTo + 1n
242
- } and ${currentL1BlockNumber}.`,
257
+ `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`,
243
258
  );
259
+ return;
244
260
  }
245
261
 
246
262
  await this.store.addL1ToL2Messages(retrievedL1ToL2Messages);
247
263
 
248
- // Read all data from chain and then write to our stores at the end
249
- const nextExpectedL2BlockNum = BigInt((await this.store.getSynchedL2BlockNumber()) + 1);
264
+ this.log.verbose(
265
+ `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
266
+ messagesSynchedTo + 1n
267
+ } and ${currentL1BlockNumber}.`,
268
+ );
269
+ }
270
+
271
+ private async updateLastProvenL2Block(provenSynchedTo: bigint, currentL1BlockNumber: bigint) {
272
+ if (currentL1BlockNumber <= provenSynchedTo) {
273
+ return;
274
+ }
275
+
276
+ const provenBlockNumber = await this.rollup.read.getProvenBlockNumber();
277
+ if (provenBlockNumber) {
278
+ await this.store.setProvenL2BlockNumber({
279
+ retrievedData: Number(provenBlockNumber),
280
+ lastProcessedL1BlockNumber: currentL1BlockNumber,
281
+ });
282
+ }
283
+ }
284
+
285
+ private async handleL2blocks(blockUntilSynced: boolean, blocksSynchedTo: bigint, currentL1BlockNumber: bigint) {
286
+ if (currentL1BlockNumber <= blocksSynchedTo) {
287
+ return;
288
+ }
289
+
290
+ const lastBlock = await this.getBlock(-1);
291
+
292
+ const [, , pendingBlockNumber, pendingArchive, archiveOfMyBlock] = await this.rollup.read.status([
293
+ BigInt(lastBlock?.number ?? 0),
294
+ ]);
295
+
296
+ const noBlocksButInitial = lastBlock === undefined && pendingBlockNumber == 0n;
297
+ const noBlockSinceLast =
298
+ lastBlock &&
299
+ pendingBlockNumber === BigInt(lastBlock.number) &&
300
+ pendingArchive === lastBlock.archive.root.toString();
301
+
302
+ if (noBlocksButInitial || noBlockSinceLast) {
303
+ await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
304
+ this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
305
+ return;
306
+ }
307
+
308
+ if (lastBlock && archiveOfMyBlock !== lastBlock.archive.root.toString()) {
309
+ // @todo Either `prune` have been called, or L1 have re-orged deep enough to remove a block.
310
+ // Issue#8620 and Issue#8621
311
+ }
250
312
 
251
313
  this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
252
314
  const retrievedBlocks = await retrieveBlockFromRollup(
315
+ this.rollup,
253
316
  this.publicClient,
254
- this.rollupAddress,
255
317
  blockUntilSynced,
256
318
  blocksSynchedTo + 1n,
257
319
  currentL1BlockNumber,
258
- nextExpectedL2BlockNum,
320
+ this.log,
259
321
  );
260
322
 
261
- // Add the body
323
+ if (retrievedBlocks.length === 0) {
324
+ await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
325
+ this.log.verbose(`Retrieved no new blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
326
+ return;
327
+ }
262
328
 
263
- (retrievedBlocks.length ? this.log.verbose : this.log.debug)(
264
- `Retrieved ${retrievedBlocks.length || 'no'} new L2 blocks between L1 blocks ${
329
+ this.log.debug(
330
+ `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${
265
331
  blocksSynchedTo + 1n
266
332
  } and ${currentL1BlockNumber}.`,
267
333
  );
268
334
 
269
- const lastProcessedL1BlockNumber =
270
- retrievedBlocks.length > 0 ? retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber : blocksSynchedTo;
335
+ const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
271
336
 
272
337
  this.log.debug(
273
338
  `Processing retrieved blocks ${retrievedBlocks
@@ -275,127 +340,37 @@ export class Archiver implements ArchiveSource {
275
340
  .join(',')} with last processed L1 block ${lastProcessedL1BlockNumber}`,
276
341
  );
277
342
 
278
- if (retrievedBlocks.length > 0) {
279
- await Promise.all(
280
- retrievedBlocks.map(block => {
281
- const noteEncryptedLogs = block.data.body.noteEncryptedLogs;
282
- const encryptedLogs = block.data.body.encryptedLogs;
283
- const unencryptedLogs = block.data.body.unencryptedLogs;
284
- return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.data.number);
285
- }),
286
- );
287
-
288
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
289
- await Promise.all(
290
- retrievedBlocks.map(async block => {
291
- const blockLogs = block.data.body.txEffects
292
- .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
293
- .flatMap(txLog => txLog.unrollLogs());
294
- await this.storeRegisteredContractClasses(blockLogs, block.data.number);
295
- await this.storeDeployedContractInstances(blockLogs, block.data.number);
296
- await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
297
- }),
298
- );
299
-
300
- const timer = new Timer();
301
- await this.store.addBlockBodies({
302
- lastProcessedL1BlockNumber: lastProcessedL1BlockNumber,
303
- retrievedData: retrievedBlocks.map(b => b.data.body),
304
- });
305
- await this.store.addBlocks(retrievedBlocks);
306
- this.instrumentation.processNewBlocks(
307
- timer.ms() / retrievedBlocks.length,
308
- retrievedBlocks.map(b => b.data),
309
- );
310
- const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
311
- this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
312
- }
313
-
314
- // Fetch the logs for proven blocks in the block range and update the last proven block number.
315
- if (currentL1BlockNumber > provenLogsSynchedTo) {
316
- await this.updateLastProvenL2Block(provenLogsSynchedTo + 1n, currentL1BlockNumber);
317
- }
318
-
319
- if (retrievedBlocks.length > 0 || blockUntilSynced) {
320
- (blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`);
321
- }
322
- }
323
-
324
- private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) {
325
- const logs = await retrieveL2ProofVerifiedEvents(this.publicClient, this.rollupAddress, fromBlock, toBlock);
326
- const lastLog = logs[logs.length - 1];
327
- if (!lastLog) {
328
- return;
329
- }
330
-
331
- const provenBlockNumber = lastLog.l2BlockNumber;
332
- if (!provenBlockNumber) {
333
- throw new Error(`Missing argument blockNumber from L2ProofVerified event`);
334
- }
335
-
336
- await this.emitProofVerifiedMetrics(logs);
337
-
338
- const currentProvenBlockNumber = await this.store.getProvenL2BlockNumber();
339
- if (provenBlockNumber > currentProvenBlockNumber) {
340
- // Update the last proven block number
341
- this.log.verbose(`Updated last proven block number from ${currentProvenBlockNumber} to ${provenBlockNumber}`);
342
- await this.store.setProvenL2BlockNumber({
343
- retrievedData: Number(provenBlockNumber),
344
- lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
345
- });
346
- this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
347
- } else {
348
- // We set the last processed L1 block number to the last L1 block number in the range to avoid duplicate processing
349
- await this.store.setProvenL2BlockNumber({
350
- retrievedData: Number(currentProvenBlockNumber),
351
- lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
352
- });
353
- }
354
- }
355
-
356
- /**
357
- * Emits as metrics the block number proven, who proved it, and how much time passed since it was submitted.
358
- * @param logs - The ProofVerified logs to emit metrics for, as collected from `retrieveL2ProofVerifiedEvents`.
359
- **/
360
- private async emitProofVerifiedMetrics(logs: { l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]) {
361
- if (!logs.length || !this.instrumentation.isEnabled()) {
362
- return;
363
- }
364
-
365
- const l1BlockTimes = new Map(
366
- await Promise.all(
367
- unique(logs.map(log => log.l1BlockNumber)).map(
368
- async blockNumber => [blockNumber, await getL1BlockTime(this.publicClient, blockNumber)] as const,
369
- ),
370
- ),
343
+ await Promise.all(
344
+ retrievedBlocks.map(block => {
345
+ return this.store.addLogs(
346
+ block.data.body.noteEncryptedLogs,
347
+ block.data.body.encryptedLogs,
348
+ block.data.body.unencryptedLogs,
349
+ block.data.number,
350
+ );
351
+ }),
371
352
  );
372
353
 
373
- // Collect L2 block times for all the blocks verified, this is the time in which the block proven was
374
- // originally submitted to L1, using the L1 timestamp of the transaction.
375
- const getL2BlockTime = async (blockNumber: bigint) =>
376
- (await this.store.getBlocks(Number(blockNumber), 1))[0]?.l1.timestamp;
377
-
378
- const l2BlockTimes = new Map(
379
- await Promise.all(
380
- unique(logs.map(log => log.l2BlockNumber)).map(
381
- async blockNumber => [blockNumber, await getL2BlockTime(blockNumber)] as const,
382
- ),
383
- ),
354
+ // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
355
+ await Promise.all(
356
+ retrievedBlocks.map(async block => {
357
+ const blockLogs = block.data.body.txEffects
358
+ .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
359
+ .flatMap(txLog => txLog.unrollLogs());
360
+ await this.storeRegisteredContractClasses(blockLogs, block.data.number);
361
+ await this.storeDeployedContractInstances(blockLogs, block.data.number);
362
+ await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
363
+ }),
384
364
  );
385
365
 
386
- // Emit the prover id and the time difference between block submission and proof.
387
- this.instrumentation.processProofsVerified(
388
- compactArray(
389
- logs.map(log => {
390
- const l1BlockTime = l1BlockTimes.get(log.l1BlockNumber)!;
391
- const l2BlockTime = l2BlockTimes.get(log.l2BlockNumber);
392
- if (!l2BlockTime) {
393
- return undefined;
394
- }
395
- return { ...log, delay: l1BlockTime - l2BlockTime, proverId: log.proverId.toString() };
396
- }),
397
- ),
366
+ const timer = new Timer();
367
+ await this.store.addBlocks(retrievedBlocks);
368
+ this.instrumentation.processNewBlocks(
369
+ timer.ms() / retrievedBlocks.length,
370
+ retrievedBlocks.map(b => b.data),
398
371
  );
372
+ const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
373
+ this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
399
374
  }
400
375
 
401
376
  /**
@@ -1,5 +1,4 @@
1
1
  import {
2
- type Body,
3
2
  type EncryptedL2BlockL2Logs,
4
3
  type EncryptedNoteL2BlockL2Logs,
5
4
  type FromLogType,
@@ -33,8 +32,6 @@ import { type L1Published } from './structs/published.js';
33
32
  export type ArchiverL1SynchPoint = {
34
33
  /** Number of the last L1 block that added a new L2 block metadata. */
35
34
  blocksSynchedTo?: bigint;
36
- /** Number of the last L1 block that added a new L2 block body. */
37
- blockBodiesSynchedTo?: bigint;
38
35
  /** Number of the last L1 block that added L1 -> L2 messages from the Inbox. */
39
36
  messagesSynchedTo?: bigint;
40
37
  /** Number of the last L1 block that added a new proven block. */
@@ -53,21 +50,6 @@ export interface ArchiverDataStore {
53
50
  */
54
51
  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean>;
55
52
 
56
- /**
57
- * Append new block bodies to the store's list.
58
- * @param blockBodies - The L2 block bodies to be added to the store.
59
- * @returns True if the operation is successful.
60
- */
61
- addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean>;
62
-
63
- /**
64
- * Gets block bodies that have the same txsEffectsHashes as we supply.
65
- *
66
- * @param txsEffectsHashes - A list of txsEffectsHashes.
67
- * @returns The requested L2 block bodies
68
- */
69
- getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]>;
70
-
71
53
  /**
72
54
  * Gets up to `limit` amount of L2 blocks starting from `from`.
73
55
  * @param from - Number of the first block to return (inclusive).
@@ -165,6 +147,18 @@ export interface ArchiverDataStore {
165
147
  */
166
148
  setProvenL2BlockNumber(l2BlockNumber: SingletonDataRetrieval<number>): Promise<void>;
167
149
 
150
+ /**
151
+ * Stores the l1 block number that blocks have been synched until
152
+ * @param l1BlockNumber - The l1 block number
153
+ */
154
+ setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
155
+
156
+ /**
157
+ * Stores the l1 block number that messages have been synched until
158
+ * @param l1BlockNumber - The l1 block number
159
+ */
160
+ setMessageSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
161
+
168
162
  /**
169
163
  * Gets the synch point of the archiver
170
164
  */
@@ -1,4 +1,4 @@
1
- import { type Body, InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
1
+ import { InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
2
2
  import '@aztec/circuit-types/jest';
3
3
  import { AztecAddress, Fr, INITIAL_L2_BLOCK_NUM, L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/circuits.js';
4
4
  import {
@@ -15,7 +15,6 @@ import {
15
15
  } from '@aztec/types/contracts';
16
16
 
17
17
  import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
18
- import { type DataRetrieval } from './structs/data_retrieval.js';
19
18
  import { type L1Published } from './structs/published.js';
20
19
 
21
20
  /**
@@ -26,7 +25,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
26
25
  describe(testName, () => {
27
26
  let store: ArchiverDataStore;
28
27
  let blocks: L1Published<L2Block>[];
29
- let blockBodies: DataRetrieval<Body>;
30
28
  const blockTests: [number, number, () => L1Published<L2Block>[]][] = [
31
29
  [1, 1, () => blocks.slice(0, 1)],
32
30
  [10, 1, () => blocks.slice(9, 10)],
@@ -41,17 +39,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
41
39
  data: L2Block.random(i + 1),
42
40
  l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
43
41
  }));
44
- blockBodies = {
45
- retrievedData: blocks.map(block => block.data.body),
46
- lastProcessedL1BlockNumber: 4n,
47
- };
48
42
  });
49
43
 
50
44
  describe('addBlocks', () => {
51
- it('returns success when adding block bodies', async () => {
52
- await expect(store.addBlockBodies(blockBodies)).resolves.toBe(true);
53
- });
54
-
55
45
  it('returns success when adding blocks', async () => {
56
46
  await expect(store.addBlocks(blocks)).resolves.toBe(true);
57
47
  });
@@ -65,7 +55,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
65
55
  describe('getBlocks', () => {
66
56
  beforeEach(async () => {
67
57
  await store.addBlocks(blocks);
68
- await store.addBlockBodies(blockBodies);
69
58
  });
70
59
 
71
60
  it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
@@ -101,7 +90,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
101
90
  await expect(store.getSynchPoint()).resolves.toEqual({
102
91
  blocksSynchedTo: undefined,
103
92
  messagesSynchedTo: undefined,
104
- blockBodiesSynchedTo: undefined,
105
93
  provenLogsSynchedTo: undefined,
106
94
  } satisfies ArchiverL1SynchPoint);
107
95
  });
@@ -111,17 +99,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
111
99
  await expect(store.getSynchPoint()).resolves.toEqual({
112
100
  blocksSynchedTo: 19n,
113
101
  messagesSynchedTo: undefined,
114
- blockBodiesSynchedTo: undefined,
115
- provenLogsSynchedTo: undefined,
116
- } satisfies ArchiverL1SynchPoint);
117
- });
118
-
119
- it('returns the L1 block number in which the most recent L2 block body was published', async () => {
120
- await store.addBlockBodies(blockBodies);
121
- await expect(store.getSynchPoint()).resolves.toEqual({
122
- blocksSynchedTo: undefined,
123
- messagesSynchedTo: undefined,
124
- blockBodiesSynchedTo: blockBodies.lastProcessedL1BlockNumber,
125
102
  provenLogsSynchedTo: undefined,
126
103
  } satisfies ArchiverL1SynchPoint);
127
104
  });
@@ -134,7 +111,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
134
111
  await expect(store.getSynchPoint()).resolves.toEqual({
135
112
  blocksSynchedTo: undefined,
136
113
  messagesSynchedTo: 1n,
137
- blockBodiesSynchedTo: undefined,
138
114
  provenLogsSynchedTo: undefined,
139
115
  } satisfies ArchiverL1SynchPoint);
140
116
  });
@@ -144,7 +120,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
144
120
  await expect(store.getSynchPoint()).resolves.toEqual({
145
121
  blocksSynchedTo: undefined,
146
122
  messagesSynchedTo: undefined,
147
- blockBodiesSynchedTo: undefined,
148
123
  provenLogsSynchedTo: 3n,
149
124
  } satisfies ArchiverL1SynchPoint);
150
125
  });
@@ -212,7 +187,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
212
187
  ),
213
188
  );
214
189
  await store.addBlocks(blocks);
215
- await store.addBlockBodies(blockBodies);
216
190
  });
217
191
 
218
192
  it.each([
@@ -364,7 +338,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
364
338
  }));
365
339
 
366
340
  await store.addBlocks(blocks);
367
- await store.addBlockBodies(blockBodies);
368
341
 
369
342
  await Promise.all(
370
343
  blocks.map(block =>