@aztec/archiver 0.65.2 → 0.66.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46):
  1. package/dest/archiver/archiver.d.ts +18 -22
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +143 -99
  4. package/dest/archiver/archiver_store.d.ts +7 -8
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +126 -150
  8. package/dest/archiver/config.d.ts +6 -12
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +6 -1
  11. package/dest/archiver/data_retrieval.d.ts +2 -3
  12. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  13. package/dest/archiver/data_retrieval.js +14 -15
  14. package/dest/archiver/instrumentation.d.ts +2 -7
  15. package/dest/archiver/instrumentation.d.ts.map +1 -1
  16. package/dest/archiver/instrumentation.js +3 -6
  17. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +7 -8
  18. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +7 -8
  20. package/dest/archiver/kv_archiver_store/log_store.d.ts +7 -8
  21. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  22. package/dest/archiver/kv_archiver_store/log_store.js +55 -95
  23. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +8 -10
  24. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  25. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +50 -57
  26. package/dest/index.d.ts +2 -2
  27. package/dest/index.d.ts.map +1 -1
  28. package/dest/index.js +3 -42
  29. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  30. package/dest/test/mock_l2_block_source.js +2 -2
  31. package/package.json +11 -13
  32. package/src/archiver/archiver.ts +199 -191
  33. package/src/archiver/archiver_store.ts +6 -13
  34. package/src/archiver/archiver_store_test_suite.ts +160 -186
  35. package/src/archiver/config.ts +12 -12
  36. package/src/archiver/data_retrieval.ts +12 -17
  37. package/src/archiver/instrumentation.ts +3 -5
  38. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +7 -14
  39. package/src/archiver/kv_archiver_store/log_store.ts +68 -118
  40. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +51 -65
  41. package/src/index.ts +5 -59
  42. package/src/test/mock_l2_block_source.ts +1 -2
  43. package/dest/archiver/epoch_helpers.d.ts +0 -20
  44. package/dest/archiver/epoch_helpers.d.ts.map +0 -1
  45. package/dest/archiver/epoch_helpers.js +0 -34
  46. package/src/archiver/epoch_helpers.ts +0 -54
@@ -1,37 +1,35 @@
1
1
  import {
2
- type EncryptedL2Log,
3
- type FromLogType,
4
2
  type GetUnencryptedLogsResponse,
5
3
  type InBlock,
6
4
  type InboxLeaf,
5
+ type L1RollupConstants,
7
6
  type L1ToL2MessageSource,
8
7
  type L2Block,
9
8
  type L2BlockId,
10
- type L2BlockL2Logs,
11
9
  type L2BlockSource,
12
10
  type L2LogsSource,
13
11
  type L2Tips,
14
12
  type LogFilter,
15
- type LogType,
16
13
  type NullifierWithBlockSource,
17
14
  type TxEffect,
18
15
  type TxHash,
19
16
  type TxReceipt,
20
17
  type TxScopedL2Log,
21
18
  type UnencryptedL2Log,
19
+ getEpochNumberAtTimestamp,
20
+ getSlotAtTimestamp,
21
+ getSlotRangeForEpoch,
22
+ getTimestampRangeForEpoch,
22
23
  } from '@aztec/circuit-types';
23
24
  import {
24
25
  type ContractClassPublic,
25
- ContractClassRegisteredEvent,
26
26
  type ContractDataSource,
27
- ContractInstanceDeployedEvent,
28
27
  type ContractInstanceWithAddress,
29
28
  type ExecutablePrivateFunctionWithMembershipProof,
30
29
  type FunctionSelector,
31
30
  type Header,
32
- PrivateFunctionBroadcastedEvent,
31
+ type PrivateLog,
33
32
  type PublicFunction,
34
- UnconstrainedFunctionBroadcastedEvent,
35
33
  type UnconstrainedFunctionWithMembershipProof,
36
34
  computePublicBytecodeCommitment,
37
35
  isValidPrivateFunctionMembershipProof,
@@ -45,9 +43,14 @@ import { Fr } from '@aztec/foundation/fields';
45
43
  import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
46
44
  import { RunningPromise } from '@aztec/foundation/running-promise';
47
45
  import { count } from '@aztec/foundation/string';
48
- import { Timer } from '@aztec/foundation/timer';
46
+ import { elapsed } from '@aztec/foundation/timer';
49
47
  import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
50
- import { ProtocolContractAddress } from '@aztec/protocol-contracts';
48
+ import {
49
+ ContractClassRegisteredEvent,
50
+ ContractInstanceDeployedEvent,
51
+ PrivateFunctionBroadcastedEvent,
52
+ UnconstrainedFunctionBroadcastedEvent,
53
+ } from '@aztec/protocol-contracts';
51
54
  import { type TelemetryClient } from '@aztec/telemetry-client';
52
55
 
53
56
  import groupBy from 'lodash.groupby';
@@ -63,13 +66,7 @@ import {
63
66
 
64
67
  import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
65
68
  import { type ArchiverConfig } from './config.js';
66
- import { retrieveBlockFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js';
67
- import {
68
- getEpochNumberAtTimestamp,
69
- getSlotAtTimestamp,
70
- getSlotRangeForEpoch,
71
- getTimestampRangeForEpoch,
72
- } from './epoch_helpers.js';
69
+ import { retrieveBlocksFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js';
73
70
  import { ArchiverInstrumentation } from './instrumentation.js';
74
71
  import { type DataRetrieval } from './structs/data_retrieval.js';
75
72
  import { type L1Published } from './structs/published.js';
@@ -114,25 +111,23 @@ export class Archiver implements ArchiveSource {
114
111
  */
115
112
  constructor(
116
113
  private readonly publicClient: PublicClient<HttpTransport, Chain>,
117
- private readonly rollupAddress: EthAddress,
118
- readonly inboxAddress: EthAddress,
119
- private readonly registryAddress: EthAddress,
114
+ private readonly l1Addresses: { rollupAddress: EthAddress; inboxAddress: EthAddress; registryAddress: EthAddress },
120
115
  readonly dataStore: ArchiverDataStore,
121
- private readonly pollingIntervalMs: number,
116
+ private readonly config: { pollingIntervalMs: number; batchSize: number },
122
117
  private readonly instrumentation: ArchiverInstrumentation,
123
- private readonly l1constants: L1RollupConstants = EmptyL1RollupConstants,
118
+ private readonly l1constants: L1RollupConstants,
124
119
  private readonly log: DebugLogger = createDebugLogger('aztec:archiver'),
125
120
  ) {
126
121
  this.store = new ArchiverStoreHelper(dataStore);
127
122
 
128
123
  this.rollup = getContract({
129
- address: rollupAddress.toString(),
124
+ address: l1Addresses.rollupAddress.toString(),
130
125
  abi: RollupAbi,
131
126
  client: publicClient,
132
127
  });
133
128
 
134
129
  this.inbox = getContract({
135
- address: inboxAddress.toString(),
130
+ address: l1Addresses.inboxAddress.toString(),
136
131
  abi: InboxAbi,
137
132
  client: publicClient,
138
133
  });
@@ -173,12 +168,13 @@ export class Archiver implements ArchiveSource {
173
168
 
174
169
  const archiver = new Archiver(
175
170
  publicClient,
176
- config.l1Contracts.rollupAddress,
177
- config.l1Contracts.inboxAddress,
178
- config.l1Contracts.registryAddress,
171
+ config.l1Contracts,
179
172
  archiverStore,
180
- config.archiverPollingIntervalMS ?? 10_000,
181
- new ArchiverInstrumentation(telemetry),
173
+ {
174
+ pollingIntervalMs: config.archiverPollingIntervalMS ?? 10_000,
175
+ batchSize: config.archiverBatchSize ?? 100,
176
+ },
177
+ new ArchiverInstrumentation(telemetry, () => archiverStore.estimateSize()),
182
178
  { l1StartBlock, l1GenesisTime, epochDuration, slotDuration, ethereumSlotDuration },
183
179
  );
184
180
  await archiver.start(blockUntilSynced);
@@ -195,11 +191,10 @@ export class Archiver implements ArchiveSource {
195
191
  }
196
192
 
197
193
  if (blockUntilSynced) {
198
- this.log.info(`Performing initial chain sync to rollup contract ${this.rollupAddress.toString()}`);
199
194
  await this.sync(blockUntilSynced);
200
195
  }
201
196
 
202
- this.runningPromise = new RunningPromise(() => this.safeSync(), this.pollingIntervalMs);
197
+ this.runningPromise = new RunningPromise(() => this.safeSync(), this.config.pollingIntervalMs);
203
198
  this.runningPromise.start();
204
199
  }
205
200
 
@@ -216,9 +211,8 @@ export class Archiver implements ArchiveSource {
216
211
 
217
212
  /**
218
213
  * Fetches logs from L1 contracts and processes them.
219
- * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
220
214
  */
221
- private async sync(blockUntilSynced: boolean) {
215
+ private async sync(initialRun: boolean) {
222
216
  /**
223
217
  * We keep track of three "pointers" to L1 blocks:
224
218
  * 1. the last L1 block that published an L2 block
@@ -235,6 +229,15 @@ export class Archiver implements ArchiveSource {
235
229
  const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
236
230
  const currentL1BlockNumber = await this.publicClient.getBlockNumber();
237
231
 
232
+ if (initialRun) {
233
+ this.log.info(
234
+ `Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${Math.min(
235
+ Number(blocksSynchedTo),
236
+ Number(messagesSynchedTo),
237
+ )} to current L1 block ${currentL1BlockNumber}`,
238
+ );
239
+ }
240
+
238
241
  // ********** Ensuring Consistency of data pulled from L1 **********
239
242
 
240
243
  /**
@@ -255,7 +258,7 @@ export class Archiver implements ArchiveSource {
255
258
  */
256
259
 
257
260
  // ********** Events that are processed per L1 block **********
258
- await this.handleL1ToL2Messages(blockUntilSynced, messagesSynchedTo, currentL1BlockNumber);
261
+ await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber);
259
262
 
260
263
  // Store latest l1 block number and timestamp seen. Used for epoch and slots calculations.
261
264
  if (!this.l1BlockNumber || this.l1BlockNumber < currentL1BlockNumber) {
@@ -266,16 +269,17 @@ export class Archiver implements ArchiveSource {
266
269
  // ********** Events that are processed per L2 block **********
267
270
  if (currentL1BlockNumber > blocksSynchedTo) {
268
271
  // First we retrieve new L2 blocks
269
- const { provenBlockNumber } = await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber);
272
+ const { provenBlockNumber } = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber);
270
273
  // And then we prune the current epoch if it'd reorg on next submission.
271
274
  // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
272
275
  // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of
273
276
  // the chain locally before we start unwinding stuff. This can be optimized by figuring out
274
277
  // up to which point we're pruning, and then requesting L2 blocks up to that point only.
275
278
  await this.handleEpochPrune(provenBlockNumber, currentL1BlockNumber);
279
+ }
276
280
 
277
- const storeSizes = this.store.estimateSize();
278
- this.instrumentation.recordDBMetrics(storeSizes);
281
+ if (initialRun) {
282
+ this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete.`);
279
283
  }
280
284
  }
281
285
 
@@ -291,24 +295,31 @@ export class Archiver implements ArchiveSource {
291
295
 
292
296
  if (canPrune) {
293
297
  const blocksToUnwind = localPendingBlockNumber - provenBlockNumber;
294
- this.log.verbose(
295
- `L2 prune will occur on next submission. ` +
296
- `Unwinding ${count(blocksToUnwind, 'block')} from block ${localPendingBlockNumber} ` +
297
- `to the last proven block ${provenBlockNumber}.`,
298
- );
298
+ this.log.debug(`L2 prune will occur on next block submission.`);
299
299
  await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
300
- this.log.verbose(`Unwound ${count(blocksToUnwind, 'block')}. New L2 block is ${await this.getBlockNumber()}.`);
300
+ this.log.warn(
301
+ `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` +
302
+ `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
303
+ `Updated L2 latest block is ${await this.getBlockNumber()}.`,
304
+ );
301
305
  // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
302
306
  // Seems like the next iteration should handle this.
303
307
  // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
304
308
  }
305
309
  }
306
310
 
307
- private async handleL1ToL2Messages(
308
- blockUntilSynced: boolean,
309
- messagesSynchedTo: bigint,
310
- currentL1BlockNumber: bigint,
311
- ) {
311
+ private nextRange(end: bigint, limit: bigint): [bigint, bigint] {
312
+ const batchSize = (this.config.batchSize * this.l1constants.slotDuration) / this.l1constants.ethereumSlotDuration;
313
+ const nextStart = end + 1n;
314
+ const nextEnd = nextStart + BigInt(batchSize);
315
+ if (nextEnd > limit) {
316
+ return [nextStart, limit];
317
+ }
318
+ return [nextStart, nextEnd];
319
+ }
320
+
321
+ private async handleL1ToL2Messages(messagesSynchedTo: bigint, currentL1BlockNumber: bigint) {
322
+ this.log.trace(`Handling L1 to L2 messages from ${messagesSynchedTo} to ${currentL1BlockNumber}.`);
312
323
  if (currentL1BlockNumber <= messagesSynchedTo) {
313
324
  return;
314
325
  }
@@ -318,30 +329,30 @@ export class Archiver implements ArchiveSource {
318
329
 
319
330
  if (localTotalMessageCount === destinationTotalMessageCount) {
320
331
  await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber);
321
- this.log.verbose(
322
- `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`,
332
+ this.log.trace(
333
+ `Retrieved no new L1 to L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`,
323
334
  );
324
335
  return;
325
336
  }
326
337
 
327
- const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(
328
- this.inbox,
329
- blockUntilSynced,
330
- messagesSynchedTo + 1n,
331
- currentL1BlockNumber,
332
- );
333
-
334
- await this.store.addL1ToL2Messages(retrievedL1ToL2Messages);
335
-
336
- this.log.verbose(
337
- `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
338
- messagesSynchedTo + 1n
339
- } and ${currentL1BlockNumber}.`,
340
- );
338
+ // Retrieve messages in batches. Each batch is estimated to accommodate up to L2 'blockBatchSize' blocks.
339
+ let searchStartBlock: bigint = messagesSynchedTo;
340
+ let searchEndBlock: bigint = messagesSynchedTo;
341
+ do {
342
+ [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
343
+ this.log.trace(`Retrieving L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`);
344
+ const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock);
345
+ this.log.verbose(
346
+ `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
347
+ );
348
+ await this.store.addL1ToL2Messages(retrievedL1ToL2Messages);
349
+ for (const msg of retrievedL1ToL2Messages.retrievedData) {
350
+ this.log.debug(`Downloaded L1 to L2 message`, { leaf: msg.leaf.toString(), index: msg.index });
351
+ }
352
+ } while (searchEndBlock < currentL1BlockNumber);
341
353
  }
342
354
 
343
355
  private async handleL2blocks(
344
- blockUntilSynced: boolean,
345
356
  blocksSynchedTo: bigint,
346
357
  currentL1BlockNumber: bigint,
347
358
  ): Promise<{ provenBlockNumber: bigint }> {
@@ -361,10 +372,13 @@ export class Archiver implements ArchiveSource {
361
372
  localBlockForDestinationProvenBlockNumber &&
362
373
  provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString()
363
374
  ) {
364
- this.log.verbose(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`);
365
375
  await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
366
376
  // if we are here then we must have a valid proven epoch number
367
377
  await this.store.setProvenL2EpochNumber(Number(provenEpochNumber));
378
+ this.log.info(`Updated proven chain to block ${provenBlockNumber} (epoch ${provenEpochNumber})`, {
379
+ provenBlockNumber,
380
+ provenEpochNumber,
381
+ });
368
382
  }
369
383
  this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
370
384
  };
@@ -374,7 +388,7 @@ export class Archiver implements ArchiveSource {
374
388
  const noBlocks = localPendingBlockNumber === 0n && pendingBlockNumber === 0n;
375
389
  if (noBlocks) {
376
390
  await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
377
- this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
391
+ this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
378
392
  return { provenBlockNumber };
379
393
  }
380
394
 
@@ -391,7 +405,7 @@ export class Archiver implements ArchiveSource {
391
405
  const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingBlock.archive.root.toString();
392
406
  if (noBlockSinceLast) {
393
407
  await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
394
- this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
408
+ this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
395
409
  return { provenBlockNumber };
396
410
  }
397
411
 
@@ -401,7 +415,7 @@ export class Archiver implements ArchiveSource {
401
415
  // or the L1 have reorged.
402
416
  // In any case, we have to figure out how far into the past the action will take us.
403
417
  // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1.
404
- this.log.verbose(`L2 prune have occurred, unwind state`);
418
+ this.log.debug(`L2 prune has been detected.`);
405
419
 
406
420
  let tipAfterUnwind = localPendingBlockNumber;
407
421
  while (true) {
@@ -419,55 +433,70 @@ export class Archiver implements ArchiveSource {
419
433
  }
420
434
 
421
435
  const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind;
422
- this.log.verbose(
423
- `Unwinding ${blocksToUnwind} block${blocksToUnwind > 1n ? 's' : ''} from block ${localPendingBlockNumber}`,
424
- );
425
-
426
436
  await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
437
+
438
+ this.log.warn(
439
+ `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` +
440
+ `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` +
441
+ `Updated L2 latest block is ${await this.getBlockNumber()}.`,
442
+ );
427
443
  }
428
444
  }
429
445
 
430
- this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
431
- const retrievedBlocks = await retrieveBlockFromRollup(
432
- this.rollup,
433
- this.publicClient,
434
- blockUntilSynced,
435
- blocksSynchedTo + 1n, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
436
- currentL1BlockNumber,
437
- this.log,
438
- );
446
+ // Retrieve L2 blocks in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
447
+ // computed using the L2 block time vs the L1 block time.
448
+ let searchStartBlock: bigint = blocksSynchedTo;
449
+ let searchEndBlock: bigint = blocksSynchedTo;
450
+
451
+ do {
452
+ [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
453
+
454
+ this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
455
+ const retrievedBlocks = await retrieveBlocksFromRollup(
456
+ this.rollup,
457
+ this.publicClient,
458
+ searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
459
+ searchEndBlock,
460
+ this.log,
461
+ );
439
462
 
440
- if (retrievedBlocks.length === 0) {
441
- // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
442
- // See further details in earlier comments.
443
- this.log.verbose(`Retrieved no new L2 blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
444
- return { provenBlockNumber };
445
- }
463
+ if (retrievedBlocks.length === 0) {
464
+ // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
465
+ // See further details in earlier comments.
466
+ this.log.trace(`Retrieved no new L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
467
+ continue;
468
+ }
446
469
 
447
- this.log.debug(
448
- `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${
449
- blocksSynchedTo + 1n
450
- } and ${currentL1BlockNumber}.`,
451
- );
470
+ const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
471
+ this.log.debug(
472
+ `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${searchStartBlock} and ${searchEndBlock} with last processed L1 block ${lastProcessedL1BlockNumber}.`,
473
+ );
452
474
 
453
- const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
475
+ for (const block of retrievedBlocks) {
476
+ this.log.debug(`Ingesting new L2 block ${block.data.number} with ${block.data.body.txEffects.length} txs`, {
477
+ blockHash: block.data.hash(),
478
+ l1BlockNumber: block.l1.blockNumber,
479
+ ...block.data.header.globalVariables.toInspect(),
480
+ ...block.data.getStats(),
481
+ });
482
+ }
454
483
 
455
- this.log.debug(`last processed L1 block: [${lastProcessedL1BlockNumber}]`);
456
- for (const block of retrievedBlocks) {
457
- this.log.debug(`ingesting new L2 block`, block.data.header.globalVariables.toFriendlyJSON());
458
- }
484
+ const [processDuration] = await elapsed(() => this.store.addBlocks(retrievedBlocks));
485
+ this.instrumentation.processNewBlocks(
486
+ processDuration / retrievedBlocks.length,
487
+ retrievedBlocks.map(b => b.data),
488
+ );
459
489
 
460
- const timer = new Timer();
461
- await this.store.addBlocks(retrievedBlocks);
490
+ for (const block of retrievedBlocks) {
491
+ this.log.info(`Downloaded L2 block ${block.data.number}`, {
492
+ blockHash: block.data.hash(),
493
+ blockNumber: block.data.number,
494
+ });
495
+ }
496
+ } while (searchEndBlock < currentL1BlockNumber);
462
497
 
463
498
  // Important that we update AFTER inserting the blocks.
464
499
  await updateProvenBlock();
465
- this.instrumentation.processNewBlocks(
466
- timer.ms() / retrievedBlocks.length,
467
- retrievedBlocks.map(b => b.data),
468
- );
469
- const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
470
- this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
471
500
 
472
501
  return { provenBlockNumber };
473
502
  }
@@ -485,11 +514,11 @@ export class Archiver implements ArchiveSource {
485
514
  }
486
515
 
487
516
  public getRollupAddress(): Promise<EthAddress> {
488
- return Promise.resolve(this.rollupAddress);
517
+ return Promise.resolve(this.l1Addresses.rollupAddress);
489
518
  }
490
519
 
491
520
  public getRegistryAddress(): Promise<EthAddress> {
492
- return Promise.resolve(this.registryAddress);
521
+ return Promise.resolve(this.l1Addresses.registryAddress);
493
522
  }
494
523
 
495
524
  public getL1BlockNumber(): bigint {
@@ -629,18 +658,13 @@ export class Archiver implements ArchiveSource {
629
658
  }
630
659
 
631
660
  /**
632
- * Gets up to `limit` amount of logs starting from `from`.
633
- * @param from - Number of the L2 block to which corresponds the first logs to be returned.
634
- * @param limit - The number of logs to return.
635
- * @param logType - Specifies whether to return encrypted or unencrypted logs.
636
- * @returns The requested logs.
661
+ * Retrieves all private logs from up to `limit` blocks, starting from the block number `from`.
662
+ * @param from - The block number from which to begin retrieving logs.
663
+ * @param limit - The maximum number of blocks to retrieve logs from.
664
+ * @returns An array of private logs from the specified range of blocks.
637
665
  */
638
- public getLogs<TLogType extends LogType>(
639
- from: number,
640
- limit: number,
641
- logType: TLogType,
642
- ): Promise<L2BlockL2Logs<FromLogType<TLogType>>[]> {
643
- return this.store.getLogs(from, limit, logType);
666
+ public getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]> {
667
+ return this.store.getPrivateLogs(from, limit);
644
668
  }
645
669
 
646
670
  /**
@@ -830,12 +854,12 @@ class ArchiverStoreHelper
830
854
  * @param allLogs - All logs emitted in a bunch of blocks.
831
855
  */
832
856
  async #updateRegisteredContractClasses(allLogs: UnencryptedL2Log[], blockNum: number, operation: Operation) {
833
- const contractClasses = ContractClassRegisteredEvent.fromLogs(
834
- allLogs,
835
- ProtocolContractAddress.ContractClassRegisterer,
836
- ).map(e => e.toContractClassPublic());
857
+ const contractClasses = allLogs
858
+ .filter(log => ContractClassRegisteredEvent.isContractClassRegisteredEvent(log.data))
859
+ .map(log => ContractClassRegisteredEvent.fromLog(log.data))
860
+ .map(e => e.toContractClassPublic());
837
861
  if (contractClasses.length > 0) {
838
- contractClasses.forEach(c => this.#log.verbose(`Registering contract class ${c.id.toString()}`));
862
+ contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`));
839
863
  if (operation == Operation.Store) {
840
864
  // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive
841
865
  return await this.store.addContractClasses(
@@ -854,8 +878,11 @@ class ArchiverStoreHelper
854
878
  * Extracts and stores contract instances out of ContractInstanceDeployed events emitted by the canonical deployer contract.
855
879
  * @param allLogs - All logs emitted in a bunch of blocks.
856
880
  */
857
- async #updateDeployedContractInstances(allLogs: EncryptedL2Log[], blockNum: number, operation: Operation) {
858
- const contractInstances = ContractInstanceDeployedEvent.fromLogs(allLogs).map(e => e.toContractInstance());
881
+ async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum: number, operation: Operation) {
882
+ const contractInstances = allLogs
883
+ .filter(log => ContractInstanceDeployedEvent.isContractInstanceDeployedEvent(log))
884
+ .map(log => ContractInstanceDeployedEvent.fromLog(log))
885
+ .map(e => e.toContractInstance());
859
886
  if (contractInstances.length > 0) {
860
887
  contractInstances.forEach(c =>
861
888
  this.#log.verbose(`${Operation[operation]} contract instance at ${c.address.toString()}`),
@@ -881,14 +908,12 @@ class ArchiverStoreHelper
881
908
  */
882
909
  async #storeBroadcastedIndividualFunctions(allLogs: UnencryptedL2Log[], _blockNum: number) {
883
910
  // Filter out private and unconstrained function broadcast events
884
- const privateFnEvents = PrivateFunctionBroadcastedEvent.fromLogs(
885
- allLogs,
886
- ProtocolContractAddress.ContractClassRegisterer,
887
- );
888
- const unconstrainedFnEvents = UnconstrainedFunctionBroadcastedEvent.fromLogs(
889
- allLogs,
890
- ProtocolContractAddress.ContractClassRegisterer,
891
- );
911
+ const privateFnEvents = allLogs
912
+ .filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log.data))
913
+ .map(log => PrivateFunctionBroadcastedEvent.fromLog(log.data));
914
+ const unconstrainedFnEvents = allLogs
915
+ .filter(log => UnconstrainedFunctionBroadcastedEvent.isUnconstrainedFunctionBroadcastedEvent(log.data))
916
+ .map(log => UnconstrainedFunctionBroadcastedEvent.fromLog(log.data));
892
917
 
893
918
  // Group all events by contract class id
894
919
  for (const [classIdString, classEvents] of Object.entries(
@@ -928,30 +953,28 @@ class ArchiverStoreHelper
928
953
  }
929
954
 
930
955
  async addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
931
- return [
956
+ const opResults = await Promise.all([
932
957
  this.store.addLogs(blocks.map(block => block.data)),
933
958
  // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
934
- ...(await Promise.all(
935
- blocks.map(async block => {
936
- const contractClassLogs = block.data.body.txEffects
937
- .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : []))
938
- .flatMap(txLog => txLog.unrollLogs());
939
- // ContractInstanceDeployed event logs are now broadcast in .encryptedLogs
940
- const allEncryptedLogs = block.data.body.txEffects
941
- .flatMap(txEffect => (txEffect ? [txEffect.encryptedLogs] : []))
942
- .flatMap(txLog => txLog.unrollLogs());
943
- return (
944
- await Promise.all([
945
- this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Store),
946
- this.#updateDeployedContractInstances(allEncryptedLogs, block.data.number, Operation.Store),
947
- this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.data.number),
948
- ])
949
- ).every(Boolean);
950
- }),
951
- )),
959
+ ...blocks.map(async block => {
960
+ const contractClassLogs = block.data.body.txEffects
961
+ .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : []))
962
+ .flatMap(txLog => txLog.unrollLogs());
963
+ // ContractInstanceDeployed event logs are broadcast in privateLogs.
964
+ const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
965
+ return (
966
+ await Promise.all([
967
+ this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Store),
968
+ this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Store),
969
+ this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.data.number),
970
+ ])
971
+ ).every(Boolean);
972
+ }),
952
973
  this.store.addNullifiers(blocks.map(block => block.data)),
953
974
  this.store.addBlocks(blocks),
954
- ].every(Boolean);
975
+ ]);
976
+
977
+ return opResults.every(Boolean);
955
978
  }
956
979
 
957
980
  async unwindBlocks(from: number, blocksToUnwind: number): Promise<boolean> {
@@ -963,24 +986,29 @@ class ArchiverStoreHelper
963
986
  // from - blocksToUnwind = the new head, so + 1 for what we need to remove
964
987
  const blocks = await this.getBlocks(from - blocksToUnwind + 1, blocksToUnwind);
965
988
 
966
- return [
989
+ const opResults = await Promise.all([
967
990
  // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
968
- ...(await Promise.all(
969
- blocks.map(async block => {
970
- const contractClassLogs = block.data.body.txEffects
971
- .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : []))
972
- .flatMap(txLog => txLog.unrollLogs());
973
- // ContractInstanceDeployed event logs are now broadcast in .encryptedLogs
974
- const allEncryptedLogs = block.data.body.txEffects
975
- .flatMap(txEffect => (txEffect ? [txEffect.encryptedLogs] : []))
976
- .flatMap(txLog => txLog.unrollLogs());
977
- await this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete);
978
- await this.#updateDeployedContractInstances(allEncryptedLogs, block.data.number, Operation.Delete);
979
- }),
980
- )),
991
+ ...blocks.map(async block => {
992
+ const contractClassLogs = block.data.body.txEffects
993
+ .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : []))
994
+ .flatMap(txLog => txLog.unrollLogs());
995
+
996
+ // ContractInstanceDeployed event logs are broadcast in privateLogs.
997
+ const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
998
+
999
+ return (
1000
+ await Promise.all([
1001
+ this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete),
1002
+ this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete),
1003
+ ])
1004
+ ).every(Boolean);
1005
+ }),
1006
+
981
1007
  this.store.deleteLogs(blocks.map(b => b.data)),
982
1008
  this.store.unwindBlocks(from, blocksToUnwind),
983
- ].every(Boolean);
1009
+ ]);
1010
+
1011
+ return opResults.every(Boolean);
984
1012
  }
985
1013
 
986
1014
  getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
@@ -1004,12 +1032,8 @@ class ArchiverStoreHelper
1004
1032
  getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
1005
1033
  return this.store.getL1ToL2MessageIndex(l1ToL2Message);
1006
1034
  }
1007
- getLogs<TLogType extends LogType>(
1008
- from: number,
1009
- limit: number,
1010
- logType: TLogType,
1011
- ): Promise<L2BlockL2Logs<FromLogType<TLogType>>[]> {
1012
- return this.store.getLogs(from, limit, logType);
1035
+ getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]> {
1036
+ return this.store.getPrivateLogs(from, limit);
1013
1037
  }
1014
1038
  getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
1015
1039
  return this.store.getLogsByTags(tags);
@@ -1075,19 +1099,3 @@ class ArchiverStoreHelper
1075
1099
  return this.store.estimateSize();
1076
1100
  }
1077
1101
  }
1078
-
1079
- type L1RollupConstants = {
1080
- l1StartBlock: bigint;
1081
- l1GenesisTime: bigint;
1082
- slotDuration: number;
1083
- epochDuration: number;
1084
- ethereumSlotDuration: number;
1085
- };
1086
-
1087
- const EmptyL1RollupConstants: L1RollupConstants = {
1088
- l1StartBlock: 0n,
1089
- l1GenesisTime: 0n,
1090
- epochDuration: 0,
1091
- slotDuration: 0,
1092
- ethereumSlotDuration: 0,
1093
- };