@aztec/archiver 0.55.0 → 0.56.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dest/archiver/archiver.d.ts +4 -5
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +89 -110
  4. package/dest/archiver/archiver_store.d.ts +11 -17
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +1 -25
  8. package/dest/archiver/config.d.ts +0 -4
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +1 -6
  11. package/dest/archiver/data_retrieval.d.ts +33 -5
  12. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  13. package/dest/archiver/data_retrieval.js +124 -15
  14. package/dest/archiver/kv_archiver_store/block_store.d.ts +2 -2
  15. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  16. package/dest/archiver/kv_archiver_store/block_store.js +16 -10
  17. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +3 -15
  18. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +12 -25
  20. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -0
  21. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  22. package/dest/archiver/kv_archiver_store/message_store.js +11 -8
  23. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +3 -20
  24. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  25. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +9 -28
  26. package/dest/index.d.ts +0 -1
  27. package/dest/index.d.ts.map +1 -1
  28. package/dest/index.js +1 -2
  29. package/package.json +10 -10
  30. package/src/archiver/archiver.ts +140 -156
  31. package/src/archiver/archiver_store.ts +12 -18
  32. package/src/archiver/archiver_store_test_suite.ts +1 -28
  33. package/src/archiver/config.ts +0 -10
  34. package/src/archiver/data_retrieval.ts +189 -29
  35. package/src/archiver/kv_archiver_store/block_store.ts +17 -10
  36. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +11 -25
  37. package/src/archiver/kv_archiver_store/message_store.ts +9 -5
  38. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +10 -32
  39. package/src/index.ts +0 -2
  40. package/dest/archiver/eth_log_handlers.d.ts +0 -59
  41. package/dest/archiver/eth_log_handlers.d.ts.map +0 -1
  42. package/dest/archiver/eth_log_handlers.js +0 -155
  43. package/dest/archiver/kv_archiver_store/block_body_store.d.ts +0 -34
  44. package/dest/archiver/kv_archiver_store/block_body_store.d.ts.map +0 -1
  45. package/dest/archiver/kv_archiver_store/block_body_store.js +0 -65
  46. package/src/archiver/eth_log_handlers.ts +0 -213
  47. package/src/archiver/kv_archiver_store/block_body_store.ts +0 -74
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/archiver",
3
- "version": "0.55.0",
3
+ "version": "0.56.0",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -61,15 +61,15 @@
61
61
  ]
62
62
  },
63
63
  "dependencies": {
64
- "@aztec/circuit-types": "0.55.0",
65
- "@aztec/circuits.js": "0.55.0",
66
- "@aztec/ethereum": "0.55.0",
67
- "@aztec/foundation": "0.55.0",
68
- "@aztec/kv-store": "0.55.0",
69
- "@aztec/l1-artifacts": "0.55.0",
70
- "@aztec/protocol-contracts": "0.55.0",
71
- "@aztec/telemetry-client": "0.55.0",
72
- "@aztec/types": "0.55.0",
64
+ "@aztec/circuit-types": "0.56.0",
65
+ "@aztec/circuits.js": "0.56.0",
66
+ "@aztec/ethereum": "0.56.0",
67
+ "@aztec/foundation": "0.56.0",
68
+ "@aztec/kv-store": "0.56.0",
69
+ "@aztec/l1-artifacts": "0.56.0",
70
+ "@aztec/protocol-contracts": "0.56.0",
71
+ "@aztec/telemetry-client": "0.56.0",
72
+ "@aztec/types": "0.56.0",
73
73
  "debug": "^4.3.4",
74
74
  "lodash.groupby": "^4.6.0",
75
75
  "lodash.omit": "^4.5.0",
@@ -24,12 +24,12 @@ import {
24
24
  import { createEthereumChain } from '@aztec/ethereum';
25
25
  import { type ContractArtifact } from '@aztec/foundation/abi';
26
26
  import { type AztecAddress } from '@aztec/foundation/aztec-address';
27
- import { compactArray, unique } from '@aztec/foundation/collection';
28
27
  import { type EthAddress } from '@aztec/foundation/eth-address';
29
28
  import { Fr } from '@aztec/foundation/fields';
30
29
  import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
31
30
  import { RunningPromise } from '@aztec/foundation/running-promise';
32
31
  import { Timer } from '@aztec/foundation/timer';
32
+ import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
33
33
  import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer';
34
34
  import { type TelemetryClient } from '@aztec/telemetry-client';
35
35
  import {
@@ -42,12 +42,19 @@ import {
42
42
  } from '@aztec/types/contracts';
43
43
 
44
44
  import groupBy from 'lodash.groupby';
45
- import { type Chain, type HttpTransport, type PublicClient, createPublicClient, http } from 'viem';
45
+ import {
46
+ type Chain,
47
+ type GetContractReturnType,
48
+ type HttpTransport,
49
+ type PublicClient,
50
+ createPublicClient,
51
+ getContract,
52
+ http,
53
+ } from 'viem';
46
54
 
47
55
  import { type ArchiverDataStore } from './archiver_store.js';
48
56
  import { type ArchiverConfig } from './config.js';
49
- import { retrieveBlockFromRollup, retrieveL1ToL2Messages, retrieveL2ProofVerifiedEvents } from './data_retrieval.js';
50
- import { getL1BlockTime } from './eth_log_handlers.js';
57
+ import { retrieveBlockFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js';
51
58
  import { ArchiverInstrumentation } from './instrumentation.js';
52
59
  import { type SingletonDataRetrieval } from './structs/data_retrieval.js';
53
60
 
@@ -67,6 +74,9 @@ export class Archiver implements ArchiveSource {
67
74
  */
68
75
  private runningPromise?: RunningPromise;
69
76
 
77
+ private rollup: GetContractReturnType<typeof RollupAbi, PublicClient<HttpTransport, Chain>>;
78
+ private inbox: GetContractReturnType<typeof InboxAbi, PublicClient<HttpTransport, Chain>>;
79
+
70
80
  /**
71
81
  * Creates a new instance of the Archiver.
72
82
  * @param publicClient - A client for interacting with the Ethereum node.
@@ -87,7 +97,19 @@ export class Archiver implements ArchiveSource {
87
97
  private readonly instrumentation: ArchiverInstrumentation,
88
98
  private readonly l1StartBlock: bigint = 0n,
89
99
  private readonly log: DebugLogger = createDebugLogger('aztec:archiver'),
90
- ) {}
100
+ ) {
101
+ this.rollup = getContract({
102
+ address: rollupAddress.toString(),
103
+ abi: RollupAbi,
104
+ client: publicClient,
105
+ });
106
+
107
+ this.inbox = getContract({
108
+ address: inboxAddress.toString(),
109
+ abi: InboxAbi,
110
+ client: publicClient,
111
+ });
112
+ }
91
113
 
92
114
  /**
93
115
  * Creates a new instance of the Archiver and blocks until it syncs from chain.
@@ -109,6 +131,14 @@ export class Archiver implements ArchiveSource {
109
131
  pollingInterval: config.viemPollingIntervalMS,
110
132
  });
111
133
 
134
+ const rollup = getContract({
135
+ address: config.l1Contracts.rollupAddress.toString(),
136
+ abi: RollupAbi,
137
+ client: publicClient,
138
+ });
139
+
140
+ const l1StartBlock = await rollup.read.L1_BLOCK_AT_GENESIS();
141
+
112
142
  const archiver = new Archiver(
113
143
  publicClient,
114
144
  config.l1Contracts.rollupAddress,
@@ -117,7 +147,7 @@ export class Archiver implements ArchiveSource {
117
147
  archiverStore,
118
148
  config.archiverPollingIntervalMS,
119
149
  new ArchiverInstrumentation(telemetry),
120
- BigInt(config.archiverL1StartBlock),
150
+ BigInt(l1StartBlock),
121
151
  );
122
152
  await archiver.start(blockUntilSynced);
123
153
  return archiver;
@@ -170,31 +200,12 @@ export class Archiver implements ArchiveSource {
170
200
  * This code does not handle reorgs.
171
201
  */
172
202
  const {
173
- blockBodiesSynchedTo = this.l1StartBlock,
174
203
  blocksSynchedTo = this.l1StartBlock,
175
204
  messagesSynchedTo = this.l1StartBlock,
176
205
  provenLogsSynchedTo = this.l1StartBlock,
177
206
  } = await this.store.getSynchPoint();
178
207
  const currentL1BlockNumber = await this.publicClient.getBlockNumber();
179
208
 
180
- if (
181
- currentL1BlockNumber <= blocksSynchedTo &&
182
- currentL1BlockNumber <= messagesSynchedTo &&
183
- currentL1BlockNumber <= blockBodiesSynchedTo &&
184
- currentL1BlockNumber <= provenLogsSynchedTo
185
- ) {
186
- // chain hasn't moved forward
187
- // or it's been rolled back
188
- this.log.debug(`Nothing to sync`, {
189
- currentL1BlockNumber,
190
- blocksSynchedTo,
191
- messagesSynchedTo,
192
- provenLogsSynchedTo,
193
- blockBodiesSynchedTo,
194
- });
195
- return;
196
- }
197
-
198
209
  // ********** Ensuring Consistency of data pulled from L1 **********
199
210
 
200
211
  /**
@@ -214,51 +225,114 @@ export class Archiver implements ArchiveSource {
214
225
  * in future but for the time being it should give us the guarantees that we need
215
226
  */
216
227
 
228
+ await this.updateLastProvenL2Block(provenLogsSynchedTo, currentL1BlockNumber);
229
+
217
230
  // ********** Events that are processed per L1 block **********
218
231
 
232
+ await this.handleL1ToL2Messages(blockUntilSynced, messagesSynchedTo, currentL1BlockNumber);
233
+
219
234
  // ********** Events that are processed per L2 block **********
235
+ await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber);
236
+ }
237
+
238
+ private async handleL1ToL2Messages(
239
+ blockUntilSynced: boolean,
240
+ messagesSynchedTo: bigint,
241
+ currentL1BlockNumber: bigint,
242
+ ) {
243
+ if (currentL1BlockNumber <= messagesSynchedTo) {
244
+ return;
245
+ }
220
246
 
221
247
  const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(
222
- this.publicClient,
223
- this.inboxAddress,
248
+ this.inbox,
224
249
  blockUntilSynced,
225
250
  messagesSynchedTo + 1n,
226
251
  currentL1BlockNumber,
227
252
  );
228
253
 
229
- if (retrievedL1ToL2Messages.retrievedData.length !== 0) {
254
+ if (retrievedL1ToL2Messages.retrievedData.length === 0) {
255
+ await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber);
230
256
  this.log.verbose(
231
- `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
232
- messagesSynchedTo + 1n
233
- } and ${currentL1BlockNumber}.`,
257
+ `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`,
234
258
  );
259
+ return;
235
260
  }
236
261
 
237
262
  await this.store.addL1ToL2Messages(retrievedL1ToL2Messages);
238
263
 
239
- // Read all data from chain and then write to our stores at the end
240
- const nextExpectedL2BlockNum = BigInt((await this.store.getSynchedL2BlockNumber()) + 1);
264
+ this.log.verbose(
265
+ `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
266
+ messagesSynchedTo + 1n
267
+ } and ${currentL1BlockNumber}.`,
268
+ );
269
+ }
270
+
271
+ private async updateLastProvenL2Block(provenSynchedTo: bigint, currentL1BlockNumber: bigint) {
272
+ if (currentL1BlockNumber <= provenSynchedTo) {
273
+ return;
274
+ }
275
+
276
+ const provenBlockNumber = await this.rollup.read.getProvenBlockNumber();
277
+ if (provenBlockNumber) {
278
+ await this.store.setProvenL2BlockNumber({
279
+ retrievedData: Number(provenBlockNumber),
280
+ lastProcessedL1BlockNumber: currentL1BlockNumber,
281
+ });
282
+ }
283
+ }
284
+
285
+ private async handleL2blocks(blockUntilSynced: boolean, blocksSynchedTo: bigint, currentL1BlockNumber: bigint) {
286
+ if (currentL1BlockNumber <= blocksSynchedTo) {
287
+ return;
288
+ }
289
+
290
+ const lastBlock = await this.getBlock(-1);
291
+
292
+ const [, , pendingBlockNumber, pendingArchive, archiveOfMyBlock] = await this.rollup.read.status([
293
+ BigInt(lastBlock?.number ?? 0),
294
+ ]);
295
+
296
+ const noBlocksButInitial = lastBlock === undefined && pendingBlockNumber == 0n;
297
+ const noBlockSinceLast =
298
+ lastBlock &&
299
+ pendingBlockNumber === BigInt(lastBlock.number) &&
300
+ pendingArchive === lastBlock.archive.root.toString();
301
+
302
+ if (noBlocksButInitial || noBlockSinceLast) {
303
+ await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
304
+ this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
305
+ return;
306
+ }
307
+
308
+ if (lastBlock && archiveOfMyBlock !== lastBlock.archive.root.toString()) {
309
+ // @todo Either `prune` have been called, or L1 have re-orged deep enough to remove a block.
310
+ // Issue#8620 and Issue#8621
311
+ }
241
312
 
242
313
  this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
243
314
  const retrievedBlocks = await retrieveBlockFromRollup(
315
+ this.rollup,
244
316
  this.publicClient,
245
- this.rollupAddress,
246
317
  blockUntilSynced,
247
318
  blocksSynchedTo + 1n,
248
319
  currentL1BlockNumber,
249
- nextExpectedL2BlockNum,
320
+ this.log,
250
321
  );
251
322
 
252
- // Add the body
323
+ if (retrievedBlocks.length === 0) {
324
+ await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
325
+ this.log.verbose(`Retrieved no new blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
326
+ return;
327
+ }
253
328
 
254
- (retrievedBlocks.length ? this.log.verbose : this.log.debug)(
255
- `Retrieved ${retrievedBlocks.length || 'no'} new L2 blocks between L1 blocks ${
329
+ this.log.debug(
330
+ `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${
256
331
  blocksSynchedTo + 1n
257
332
  } and ${currentL1BlockNumber}.`,
258
333
  );
259
334
 
260
- const lastProcessedL1BlockNumber =
261
- retrievedBlocks.length > 0 ? retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber : blocksSynchedTo;
335
+ const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
262
336
 
263
337
  this.log.debug(
264
338
  `Processing retrieved blocks ${retrievedBlocks
@@ -266,127 +340,37 @@ export class Archiver implements ArchiveSource {
266
340
  .join(',')} with last processed L1 block ${lastProcessedL1BlockNumber}`,
267
341
  );
268
342
 
269
- if (retrievedBlocks.length > 0) {
270
- await Promise.all(
271
- retrievedBlocks.map(block => {
272
- const noteEncryptedLogs = block.data.body.noteEncryptedLogs;
273
- const encryptedLogs = block.data.body.encryptedLogs;
274
- const unencryptedLogs = block.data.body.unencryptedLogs;
275
- return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.data.number);
276
- }),
277
- );
278
-
279
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
280
- await Promise.all(
281
- retrievedBlocks.map(async block => {
282
- const blockLogs = block.data.body.txEffects
283
- .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
284
- .flatMap(txLog => txLog.unrollLogs());
285
- await this.storeRegisteredContractClasses(blockLogs, block.data.number);
286
- await this.storeDeployedContractInstances(blockLogs, block.data.number);
287
- await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
288
- }),
289
- );
290
-
291
- const timer = new Timer();
292
- await this.store.addBlockBodies({
293
- lastProcessedL1BlockNumber: lastProcessedL1BlockNumber,
294
- retrievedData: retrievedBlocks.map(b => b.data.body),
295
- });
296
- await this.store.addBlocks(retrievedBlocks);
297
- this.instrumentation.processNewBlocks(
298
- timer.ms() / retrievedBlocks.length,
299
- retrievedBlocks.map(b => b.data),
300
- );
301
- const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
302
- this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
303
- }
304
-
305
- // Fetch the logs for proven blocks in the block range and update the last proven block number.
306
- if (currentL1BlockNumber > provenLogsSynchedTo) {
307
- await this.updateLastProvenL2Block(provenLogsSynchedTo + 1n, currentL1BlockNumber);
308
- }
309
-
310
- if (retrievedBlocks.length > 0 || blockUntilSynced) {
311
- (blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`);
312
- }
313
- }
314
-
315
- private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) {
316
- const logs = await retrieveL2ProofVerifiedEvents(this.publicClient, this.rollupAddress, fromBlock, toBlock);
317
- const lastLog = logs[logs.length - 1];
318
- if (!lastLog) {
319
- return;
320
- }
321
-
322
- const provenBlockNumber = lastLog.l2BlockNumber;
323
- if (!provenBlockNumber) {
324
- throw new Error(`Missing argument blockNumber from L2ProofVerified event`);
325
- }
326
-
327
- await this.emitProofVerifiedMetrics(logs);
328
-
329
- const currentProvenBlockNumber = await this.store.getProvenL2BlockNumber();
330
- if (provenBlockNumber > currentProvenBlockNumber) {
331
- // Update the last proven block number
332
- this.log.verbose(`Updated last proven block number from ${currentProvenBlockNumber} to ${provenBlockNumber}`);
333
- await this.store.setProvenL2BlockNumber({
334
- retrievedData: Number(provenBlockNumber),
335
- lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
336
- });
337
- this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
338
- } else {
339
- // We set the last processed L1 block number to the last L1 block number in the range to avoid duplicate processing
340
- await this.store.setProvenL2BlockNumber({
341
- retrievedData: Number(currentProvenBlockNumber),
342
- lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
343
- });
344
- }
345
- }
346
-
347
- /**
348
- * Emits as metrics the block number proven, who proved it, and how much time passed since it was submitted.
349
- * @param logs - The ProofVerified logs to emit metrics for, as collected from `retrieveL2ProofVerifiedEvents`.
350
- **/
351
- private async emitProofVerifiedMetrics(logs: { l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]) {
352
- if (!logs.length || !this.instrumentation.isEnabled()) {
353
- return;
354
- }
355
-
356
- const l1BlockTimes = new Map(
357
- await Promise.all(
358
- unique(logs.map(log => log.l1BlockNumber)).map(
359
- async blockNumber => [blockNumber, await getL1BlockTime(this.publicClient, blockNumber)] as const,
360
- ),
361
- ),
343
+ await Promise.all(
344
+ retrievedBlocks.map(block => {
345
+ return this.store.addLogs(
346
+ block.data.body.noteEncryptedLogs,
347
+ block.data.body.encryptedLogs,
348
+ block.data.body.unencryptedLogs,
349
+ block.data.number,
350
+ );
351
+ }),
362
352
  );
363
353
 
364
- // Collect L2 block times for all the blocks verified, this is the time in which the block proven was
365
- // originally submitted to L1, using the L1 timestamp of the transaction.
366
- const getL2BlockTime = async (blockNumber: bigint) =>
367
- (await this.store.getBlocks(Number(blockNumber), 1))[0]?.l1.timestamp;
368
-
369
- const l2BlockTimes = new Map(
370
- await Promise.all(
371
- unique(logs.map(log => log.l2BlockNumber)).map(
372
- async blockNumber => [blockNumber, await getL2BlockTime(blockNumber)] as const,
373
- ),
374
- ),
354
+ // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
355
+ await Promise.all(
356
+ retrievedBlocks.map(async block => {
357
+ const blockLogs = block.data.body.txEffects
358
+ .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
359
+ .flatMap(txLog => txLog.unrollLogs());
360
+ await this.storeRegisteredContractClasses(blockLogs, block.data.number);
361
+ await this.storeDeployedContractInstances(blockLogs, block.data.number);
362
+ await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
363
+ }),
375
364
  );
376
365
 
377
- // Emit the prover id and the time difference between block submission and proof.
378
- this.instrumentation.processProofsVerified(
379
- compactArray(
380
- logs.map(log => {
381
- const l1BlockTime = l1BlockTimes.get(log.l1BlockNumber)!;
382
- const l2BlockTime = l2BlockTimes.get(log.l2BlockNumber);
383
- if (!l2BlockTime) {
384
- return undefined;
385
- }
386
- return { ...log, delay: l1BlockTime - l2BlockTime, proverId: log.proverId.toString() };
387
- }),
388
- ),
366
+ const timer = new Timer();
367
+ await this.store.addBlocks(retrievedBlocks);
368
+ this.instrumentation.processNewBlocks(
369
+ timer.ms() / retrievedBlocks.length,
370
+ retrievedBlocks.map(b => b.data),
389
371
  );
372
+ const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
373
+ this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
390
374
  }
391
375
 
392
376
  /**
@@ -1,5 +1,4 @@
1
1
  import {
2
- type Body,
3
2
  type EncryptedL2BlockL2Logs,
4
3
  type EncryptedNoteL2BlockL2Logs,
5
4
  type FromLogType,
@@ -33,8 +32,6 @@ import { type L1Published } from './structs/published.js';
33
32
  export type ArchiverL1SynchPoint = {
34
33
  /** Number of the last L1 block that added a new L2 block metadata. */
35
34
  blocksSynchedTo?: bigint;
36
- /** Number of the last L1 block that added a new L2 block body. */
37
- blockBodiesSynchedTo?: bigint;
38
35
  /** Number of the last L1 block that added L1 -> L2 messages from the Inbox. */
39
36
  messagesSynchedTo?: bigint;
40
37
  /** Number of the last L1 block that added a new proven block. */
@@ -53,21 +50,6 @@ export interface ArchiverDataStore {
53
50
  */
54
51
  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean>;
55
52
 
56
- /**
57
- * Append new block bodies to the store's list.
58
- * @param blockBodies - The L2 block bodies to be added to the store.
59
- * @returns True if the operation is successful.
60
- */
61
- addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean>;
62
-
63
- /**
64
- * Gets block bodies that have the same txsEffectsHashes as we supply.
65
- *
66
- * @param txsEffectsHashes - A list of txsEffectsHashes.
67
- * @returns The requested L2 block bodies
68
- */
69
- getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]>;
70
-
71
53
  /**
72
54
  * Gets up to `limit` amount of L2 blocks starting from `from`.
73
55
  * @param from - Number of the first block to return (inclusive).
@@ -165,6 +147,18 @@ export interface ArchiverDataStore {
165
147
  */
166
148
  setProvenL2BlockNumber(l2BlockNumber: SingletonDataRetrieval<number>): Promise<void>;
167
149
 
150
+ /**
151
+ * Stores the l1 block number that blocks have been synched until
152
+ * @param l1BlockNumber - The l1 block number
153
+ */
154
+ setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
155
+
156
+ /**
157
+ * Stores the l1 block number that messages have been synched until
158
+ * @param l1BlockNumber - The l1 block number
159
+ */
160
+ setMessageSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
161
+
168
162
  /**
169
163
  * Gets the synch point of the archiver
170
164
  */
@@ -1,4 +1,4 @@
1
- import { type Body, InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
1
+ import { InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
2
2
  import '@aztec/circuit-types/jest';
3
3
  import { AztecAddress, Fr, INITIAL_L2_BLOCK_NUM, L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/circuits.js';
4
4
  import {
@@ -15,7 +15,6 @@ import {
15
15
  } from '@aztec/types/contracts';
16
16
 
17
17
  import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
18
- import { type DataRetrieval } from './structs/data_retrieval.js';
19
18
  import { type L1Published } from './structs/published.js';
20
19
 
21
20
  /**
@@ -26,7 +25,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
26
25
  describe(testName, () => {
27
26
  let store: ArchiverDataStore;
28
27
  let blocks: L1Published<L2Block>[];
29
- let blockBodies: DataRetrieval<Body>;
30
28
  const blockTests: [number, number, () => L1Published<L2Block>[]][] = [
31
29
  [1, 1, () => blocks.slice(0, 1)],
32
30
  [10, 1, () => blocks.slice(9, 10)],
@@ -41,17 +39,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
41
39
  data: L2Block.random(i + 1),
42
40
  l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
43
41
  }));
44
- blockBodies = {
45
- retrievedData: blocks.map(block => block.data.body),
46
- lastProcessedL1BlockNumber: 4n,
47
- };
48
42
  });
49
43
 
50
44
  describe('addBlocks', () => {
51
- it('returns success when adding block bodies', async () => {
52
- await expect(store.addBlockBodies(blockBodies)).resolves.toBe(true);
53
- });
54
-
55
45
  it('returns success when adding blocks', async () => {
56
46
  await expect(store.addBlocks(blocks)).resolves.toBe(true);
57
47
  });
@@ -65,7 +55,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
65
55
  describe('getBlocks', () => {
66
56
  beforeEach(async () => {
67
57
  await store.addBlocks(blocks);
68
- await store.addBlockBodies(blockBodies);
69
58
  });
70
59
 
71
60
  it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
@@ -101,7 +90,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
101
90
  await expect(store.getSynchPoint()).resolves.toEqual({
102
91
  blocksSynchedTo: undefined,
103
92
  messagesSynchedTo: undefined,
104
- blockBodiesSynchedTo: undefined,
105
93
  provenLogsSynchedTo: undefined,
106
94
  } satisfies ArchiverL1SynchPoint);
107
95
  });
@@ -111,17 +99,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
111
99
  await expect(store.getSynchPoint()).resolves.toEqual({
112
100
  blocksSynchedTo: 19n,
113
101
  messagesSynchedTo: undefined,
114
- blockBodiesSynchedTo: undefined,
115
- provenLogsSynchedTo: undefined,
116
- } satisfies ArchiverL1SynchPoint);
117
- });
118
-
119
- it('returns the L1 block number in which the most recent L2 block body was published', async () => {
120
- await store.addBlockBodies(blockBodies);
121
- await expect(store.getSynchPoint()).resolves.toEqual({
122
- blocksSynchedTo: undefined,
123
- messagesSynchedTo: undefined,
124
- blockBodiesSynchedTo: blockBodies.lastProcessedL1BlockNumber,
125
102
  provenLogsSynchedTo: undefined,
126
103
  } satisfies ArchiverL1SynchPoint);
127
104
  });
@@ -134,7 +111,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
134
111
  await expect(store.getSynchPoint()).resolves.toEqual({
135
112
  blocksSynchedTo: undefined,
136
113
  messagesSynchedTo: 1n,
137
- blockBodiesSynchedTo: undefined,
138
114
  provenLogsSynchedTo: undefined,
139
115
  } satisfies ArchiverL1SynchPoint);
140
116
  });
@@ -144,7 +120,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
144
120
  await expect(store.getSynchPoint()).resolves.toEqual({
145
121
  blocksSynchedTo: undefined,
146
122
  messagesSynchedTo: undefined,
147
- blockBodiesSynchedTo: undefined,
148
123
  provenLogsSynchedTo: 3n,
149
124
  } satisfies ArchiverL1SynchPoint);
150
125
  });
@@ -212,7 +187,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
212
187
  ),
213
188
  );
214
189
  await store.addBlocks(blocks);
215
- await store.addBlockBodies(blockBodies);
216
190
  });
217
191
 
218
192
  it.each([
@@ -364,7 +338,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
364
338
  }));
365
339
 
366
340
  await store.addBlocks(blocks);
367
- await store.addBlockBodies(blockBodies);
368
341
 
369
342
  await Promise.all(
370
343
  blocks.map(block =>
@@ -22,11 +22,6 @@ export type ArchiverConfig = {
22
22
  */
23
23
  archiverPollingIntervalMS?: number;
24
24
 
25
- /**
26
- * The L1 block to start reading from
27
- */
28
- archiverL1StartBlock: number;
29
-
30
25
  /**
31
26
  * The polling interval viem uses in ms
32
27
  */
@@ -57,11 +52,6 @@ export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
57
52
  description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.',
58
53
  ...numberConfigHelper(1000),
59
54
  },
60
- archiverL1StartBlock: {
61
- env: 'ARCHIVER_L1_START_BLOCK',
62
- description: 'The L1 block the archiver should start reading logs from',
63
- ...numberConfigHelper(0),
64
- },
65
55
  viemPollingIntervalMS: {
66
56
  env: 'ARCHIVER_VIEM_POLLING_INTERVAL_MS',
67
57
  description: 'The polling interval viem uses in ms',