@aztec/archiver 3.0.0-nightly.20251126 → 3.0.0-nightly.20251128

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dest/archiver/archiver.d.ts +24 -17
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +218 -160
  4. package/dest/archiver/archiver_store.d.ts +1 -1
  5. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +5 -4
  8. package/dest/archiver/config.d.ts +1 -1
  9. package/dest/archiver/data_retrieval.d.ts +15 -13
  10. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  11. package/dest/archiver/data_retrieval.js +56 -55
  12. package/dest/archiver/errors.d.ts +1 -1
  13. package/dest/archiver/errors.d.ts.map +1 -1
  14. package/dest/archiver/index.d.ts +1 -1
  15. package/dest/archiver/instrumentation.d.ts +3 -3
  16. package/dest/archiver/instrumentation.d.ts.map +1 -1
  17. package/dest/archiver/kv_archiver_store/block_store.d.ts +1 -1
  18. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +1 -1
  20. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  21. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +1 -1
  22. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +2 -2
  24. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  25. package/dest/archiver/kv_archiver_store/log_store.d.ts +1 -1
  26. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  27. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -1
  28. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  29. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  30. package/dest/archiver/structs/inbox_message.d.ts +1 -1
  31. package/dest/archiver/structs/published.d.ts +3 -2
  32. package/dest/archiver/structs/published.d.ts.map +1 -1
  33. package/dest/archiver/validation.d.ts +10 -4
  34. package/dest/archiver/validation.d.ts.map +1 -1
  35. package/dest/archiver/validation.js +25 -17
  36. package/dest/factory.d.ts +1 -1
  37. package/dest/index.d.ts +2 -2
  38. package/dest/index.d.ts.map +1 -1
  39. package/dest/index.js +1 -1
  40. package/dest/rpc/index.d.ts +2 -2
  41. package/dest/test/index.d.ts +1 -1
  42. package/dest/test/mock_archiver.d.ts +1 -1
  43. package/dest/test/mock_archiver.d.ts.map +1 -1
  44. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  45. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  46. package/dest/test/mock_l2_block_source.d.ts +6 -5
  47. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  48. package/dest/test/mock_structs.d.ts +1 -1
  49. package/package.json +17 -17
  50. package/src/archiver/archiver.ts +290 -192
  51. package/src/archiver/archiver_store_test_suite.ts +5 -4
  52. package/src/archiver/data_retrieval.ts +71 -68
  53. package/src/archiver/instrumentation.ts +2 -2
  54. package/src/archiver/structs/published.ts +2 -1
  55. package/src/archiver/validation.ts +40 -19
  56. package/src/index.ts +1 -1
  57. package/src/test/mock_l2_block_source.ts +5 -4
@@ -17,17 +17,17 @@ import { count } from '@aztec/foundation/string';
  import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
  import { ContractClassPublishedEvent, PrivateFunctionBroadcastedEvent, UtilityFunctionBroadcastedEvent } from '@aztec/protocol-contracts/class-registry';
  import { ContractInstancePublishedEvent, ContractInstanceUpdatedEvent } from '@aztec/protocol-contracts/instance-registry';
- import { L2BlockSourceEvents } from '@aztec/stdlib/block';
+ import { L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
  import { computePublicBytecodeCommitment, isValidPrivateFunctionMembershipProof, isValidUtilityFunctionMembershipProof } from '@aztec/stdlib/contract';
  import { getEpochAtSlot, getEpochNumberAtTimestamp, getSlotAtTimestamp, getSlotRangeForEpoch, getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
  import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
  import { EventEmitter } from 'events';
  import groupBy from 'lodash.groupby';
  import { createPublicClient, fallback, http } from 'viem';
- import { retrieveBlocksFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedBlockToPublishedL2Block } from './data_retrieval.js';
+ import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from './data_retrieval.js';
  import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
  import { ArchiverInstrumentation } from './instrumentation.js';
- import { validateBlockAttestations } from './validation.js';
+ import { validateCheckpointAttestations } from './validation.js';
  function mapArchiverConfig(config) {
  return {
  pollingIntervalMs: config.archiverPollingIntervalMS,
@@ -37,7 +37,7 @@ function mapArchiverConfig(config) {
  };
  }
  /**
- * Pulls L2 blocks in a non-blocking manner and provides interface for their retrieval.
+ * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
  * Responsible for handling robust L1 polling so that other components do not need to
  * concern themselves with it.
  */ export class Archiver extends EventEmitter {
@@ -51,7 +51,7 @@ function mapArchiverConfig(config) {
  instrumentation;
  l1constants;
  log;
- /** A loop in which we will be continually fetching new L2 blocks. */ runningPromise;
+ /** A loop in which we will be continually fetching new checkpoints. */ runningPromise;
  rollup;
  inbox;
  store;
@@ -233,29 +233,29 @@ function mapArchiverConfig(config) {
  maxAllowedDelay
  });
  }
- // ********** Events that are processed per L2 block **********
+ // ********** Events that are processed per checkpoint **********
  if (currentL1BlockNumber > blocksSynchedTo) {
- // First we retrieve new L2 blocks and store them in the DB. This will also update the
- // pending chain validation status, proven block number, and synched L1 block number.
- const rollupStatus = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber);
+ // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
+ // pending chain validation status, proven checkpoint number, and synched L1 block number.
+ const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
  // Then we prune the current epoch if it'd reorg on next submission.
- // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
- // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of
+ // Note that we don't do this before retrieving checkpoints because we may need to retrieve
+ // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
  // the chain locally before we start unwinding stuff. This can be optimized by figuring out
- // up to which point we're pruning, and then requesting L2 blocks up to that point only.
- const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenBlockNumber, currentL1BlockNumber, currentL1Timestamp);
- // If the last block we processed had an invalid attestation, we manually advance the L1 syncpoint
+ // up to which point we're pruning, and then requesting checkpoints up to that point only.
+ const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp);
+ // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
  // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
- // we get a valid block to advance the syncpoint.
- if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithL2Blocks !== undefined) {
- await this.store.setBlockSynchedL1BlockNumber(rollupStatus.lastL1BlockWithL2Blocks);
+ // we get a valid checkpoint to advance the syncpoint.
+ if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
+ await this.store.setBlockSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
  }
- // And lastly we check if we are missing any L2 blocks behind us due to a possible L1 reorg.
+ // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
  // We only do this if rollup cant prune on the next submission. Otherwise we will end up
- // re-syncing the blocks we have just unwound above. We also dont do this if the last block is invalid,
+ // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
  // since the archiver will rightfully refuse to sync up to it.
  if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
- await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
+ await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
  }
  this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
  }
@@ -291,30 +291,30 @@ function mapArchiverConfig(config) {
  }
  return result;
  }
- /** Checks if there'd be a reorg for the next block submission and start pruning now. */ async handleEpochPrune(provenBlockNumber, currentL1BlockNumber, currentL1Timestamp) {
+ /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */ async handleEpochPrune(provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp) {
  const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
- const localPendingBlockNumber = await this.getBlockNumber();
- const canPrune = localPendingBlockNumber > provenBlockNumber && rollupCanPrune;
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+ const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
  if (canPrune) {
  const timer = new Timer();
- const pruneFrom = provenBlockNumber + 1;
- const header = await this.getBlockHeader(Number(pruneFrom));
+ const pruneFrom = provenCheckpointNumber + 1;
+ const header = await this.getCheckpointHeader(Number(pruneFrom));
  if (header === undefined) {
- throw new Error(`Missing block header ${pruneFrom}`);
+ throw new Error(`Missing checkpoint header ${pruneFrom}`);
  }
- const pruneFromSlotNumber = header.globalVariables.slotNumber.toBigInt();
+ const pruneFromSlotNumber = header.slotNumber.toBigInt();
  const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
- const blocksToUnwind = localPendingBlockNumber - provenBlockNumber;
- const blocks = await this.getBlocks(Number(provenBlockNumber) + 1, Number(blocksToUnwind));
+ const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
+ const checkpoints = await this.getCheckpoints(Number(provenCheckpointNumber) + 1, Number(checkpointsToUnwind));
  // Emit an event for listening services to react to the chain prune
  this.emit(L2BlockSourceEvents.L2PruneDetected, {
  type: L2BlockSourceEvents.L2PruneDetected,
  epochNumber: pruneFromEpochNumber,
- blocks
+ blocks: checkpoints.flatMap((c)=>L2Block.fromCheckpoint(c))
  });
- this.log.debug(`L2 prune from ${provenBlockNumber + 1} to ${localPendingBlockNumber} will occur on next block submission.`);
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
- this.log.warn(`Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest block is ${await this.getBlockNumber()}.`);
+ this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
+ this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
  this.instrumentation.processPrune(timer.ms());
  // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
  // Seems like the next iteration should handle this.
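For orientation only, the prune path above boils down to a small calculation: the first checkpoint to drop, the epoch it falls in, and how many checkpoints to unwind. The sketch below is not code from the package; `epochAtSlot` is a simplified stand-in for `getEpochAtSlot` from `@aztec/stdlib/epoch-helpers` (which takes the rollup's L1 constants), and the numbers in the example are made up.

```ts
// Simplified stand-in for getEpochAtSlot: assumes a fixed number of slots per epoch.
function epochAtSlot(slot: bigint, slotsPerEpoch: bigint): bigint {
  return slot / slotsPerEpoch; // bigint division truncates, as intended here
}

// Mirrors the quantities handleEpochPrune derives before unwinding.
function planPrune(
  provenCheckpointNumber: number,
  localPendingCheckpointNumber: number,
  pruneFromSlot: bigint,
  slotsPerEpoch: bigint,
) {
  return {
    pruneFromCheckpoint: provenCheckpointNumber + 1,
    pruneFromEpoch: epochAtSlot(pruneFromSlot, slotsPerEpoch),
    checkpointsToUnwind: localPendingCheckpointNumber - provenCheckpointNumber,
  };
}

// Example: proven checkpoint 90, pending tip 96 -> unwind 6 checkpoints starting at 91.
console.log(planPrune(90, 96, 2_912n, 32n));
// { pruneFromCheckpoint: 91, pruneFromEpoch: 91n, checkpointsToUnwind: 6 }
```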
@@ -483,170 +483,173 @@ function mapArchiverConfig(config) {
  }
  return Buffer32.fromString(block.hash);
  }
- async handleL2blocks(blocksSynchedTo, currentL1BlockNumber) {
- const localPendingBlockNumber = await this.getBlockNumber();
+ async handleCheckpoints(blocksSynchedTo, currentL1BlockNumber) {
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
  const initialValidationResult = await this.store.getPendingChainValidationStatus();
- const [provenBlockNumber, provenArchive, pendingBlockNumber, pendingArchive, archiveForLocalPendingBlockNumber] = await this.rollup.status(BigInt(localPendingBlockNumber), {
+ const [rollupProvenCheckpointNumber, provenArchive, rollupPendingCheckpointNumber, pendingArchive, archiveForLocalPendingCheckpointNumber] = await this.rollup.status(BigInt(localPendingCheckpointNumber), {
  blockNumber: currentL1BlockNumber
  });
+ const provenCheckpointNumber = Number(rollupProvenCheckpointNumber);
+ const pendingCheckpointNumber = Number(rollupPendingCheckpointNumber);
  const rollupStatus = {
- provenBlockNumber: Number(provenBlockNumber),
+ provenCheckpointNumber,
  provenArchive,
- pendingBlockNumber: Number(pendingBlockNumber),
+ pendingCheckpointNumber,
  pendingArchive,
  validationResult: initialValidationResult
  };
  this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
- localPendingBlockNumber,
+ localPendingCheckpointNumber,
  blocksSynchedTo,
  currentL1BlockNumber,
- archiveForLocalPendingBlockNumber,
+ archiveForLocalPendingCheckpointNumber,
  ...rollupStatus
  });
- const updateProvenBlock = async ()=>{
- // Annoying edge case: if proven block is moved back to 0 due to a reorg at the beginning of the chain,
- // we need to set it to zero. This is an edge case because we dont have a block zero (initial block is one),
- // so localBlockForDestinationProvenBlockNumber would not be found below.
- if (provenBlockNumber === 0n) {
- const localProvenBlockNumber = await this.store.getProvenL2BlockNumber();
- if (localProvenBlockNumber !== Number(provenBlockNumber)) {
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
- this.log.info(`Rolled back proven chain to block ${provenBlockNumber}`, {
- provenBlockNumber
+ const updateProvenCheckpoint = async ()=>{
+ // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
+ // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
+ // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
+ if (provenCheckpointNumber === 0) {
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
+ this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, {
+ provenCheckpointNumber
  });
  }
  }
- const localBlockForDestinationProvenBlockNumber = await this.getBlock(Number(provenBlockNumber));
- // Sanity check. I've hit what seems to be a state where the proven block is set to a value greater than the latest
- // synched block when requesting L2Tips from the archiver. This is the only place where the proven block is set.
- const synched = await this.store.getSynchedL2BlockNumber();
- if (localBlockForDestinationProvenBlockNumber && synched < localBlockForDestinationProvenBlockNumber?.number) {
- this.log.error(`Hit local block greater than last synched block: ${localBlockForDestinationProvenBlockNumber.number} > ${synched}`);
+ const localCheckpointForDestinationProvenCheckpointNumber = await this.getCheckpoint(provenCheckpointNumber);
+ // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
+ // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
+ const synched = await this.getSynchedCheckpointNumber();
+ if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.number) {
+ this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.number} > ${synched}`);
  }
- this.log.trace(`Local block for remote proven block ${provenBlockNumber} is ${localBlockForDestinationProvenBlockNumber?.archive.root.toString() ?? 'undefined'}`);
- if (localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString()) {
- const localProvenBlockNumber = await this.store.getProvenL2BlockNumber();
- if (localProvenBlockNumber !== Number(provenBlockNumber)) {
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
- this.log.info(`Updated proven chain to block ${provenBlockNumber}`, {
- provenBlockNumber
+ this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
+ const lastProvenBlockNumber = await this.getLastBlockNumberInCheckpoint(provenCheckpointNumber);
+ if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive === localCheckpointForDestinationProvenCheckpointNumber.archive.root.toString()) {
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
+ this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
+ provenCheckpointNumber
  });
- const provenSlotNumber = localBlockForDestinationProvenBlockNumber.header.globalVariables.slotNumber.toBigInt();
+ const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber.toBigInt();
  const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
  this.emit(L2BlockSourceEvents.L2BlockProven, {
  type: L2BlockSourceEvents.L2BlockProven,
- blockNumber: provenBlockNumber,
+ blockNumber: BigInt(lastProvenBlockNumber),
  slotNumber: provenSlotNumber,
  epochNumber: provenEpochNumber
  });
  } else {
- this.log.trace(`Proven block ${provenBlockNumber} already stored.`);
+ this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
  }
  }
- this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
+ this.instrumentation.updateLastProvenBlock(lastProvenBlockNumber);
  };
- // This is an edge case that we only hit if there are no proposed blocks.
- // If we have 0 blocks locally and there are no blocks onchain there is nothing to do.
- const noBlocks = localPendingBlockNumber === 0 && pendingBlockNumber === 0n;
- if (noBlocks) {
+ // This is an edge case that we only hit if there are no proposed checkpoints.
+ // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
+ const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
+ if (noCheckpoints) {
  await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no blocks on chain`);
+ this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
  return rollupStatus;
  }
- await updateProvenBlock();
+ await updateProvenCheckpoint();
  // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
- // are any state that could be impacted by it. If we have no blocks, there is no impact.
- if (localPendingBlockNumber > 0) {
- const localPendingBlock = await this.getBlock(localPendingBlockNumber);
- if (localPendingBlock === undefined) {
- throw new Error(`Missing block ${localPendingBlockNumber}`);
+ // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
+ if (localPendingCheckpointNumber > 0) {
+ const localPendingCheckpoint = await this.getCheckpoint(localPendingCheckpointNumber);
+ if (localPendingCheckpoint === undefined) {
+ throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
  }
- const localPendingArchiveRoot = localPendingBlock.archive.root.toString();
- const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingArchiveRoot;
- if (noBlockSinceLast) {
+ const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
+ const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive === localPendingArchiveRoot;
+ if (noCheckpointSinceLast) {
  // We believe the following line causes a problem when we encounter L1 re-orgs.
  // Basically, by setting the synched L1 block number here, we are saying that we have
- // processed all blocks up to the current L1 block number and we will not attempt to retrieve logs from
+ // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
  // this block again (or any blocks before).
- // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing blocks
+ // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
  // We must only set this block number based on actually retrieved logs.
  // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
  // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
+ this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
  return rollupStatus;
  }
- const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingArchiveRoot;
- if (!localPendingBlockInChain) {
- // If our local pending block tip is not in the chain on L1 a "prune" must have happened
+ const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber === localPendingArchiveRoot;
+ if (!localPendingCheckpointInChain) {
+ // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
  // or the L1 have reorged.
  // In any case, we have to figure out how far into the past the action will take us.
- // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1.
- this.log.debug(`L2 prune has been detected due to local pending block ${localPendingBlockNumber} not in chain`, {
- localPendingBlockNumber,
+ // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
+ this.log.debug(`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`, {
+ localPendingCheckpointNumber,
  localPendingArchiveRoot,
- archiveForLocalPendingBlockNumber
+ archiveForLocalPendingCheckpointNumber
  });
- let tipAfterUnwind = localPendingBlockNumber;
+ let tipAfterUnwind = localPendingCheckpointNumber;
  while(true){
- const candidateBlock = await this.getBlock(Number(tipAfterUnwind));
- if (candidateBlock === undefined) {
+ const candidateCheckpoint = await this.getCheckpoint(tipAfterUnwind);
+ if (candidateCheckpoint === undefined) {
  break;
  }
- const archiveAtContract = await this.rollup.archiveAt(BigInt(candidateBlock.number));
- this.log.trace(`Checking local block ${candidateBlock.number} with archive ${candidateBlock.archive.root}`, {
+ const archiveAtContract = await this.rollup.archiveAt(BigInt(candidateCheckpoint.number));
+ this.log.trace(`Checking local checkpoint ${candidateCheckpoint.number} with archive ${candidateCheckpoint.archive.root}`, {
  archiveAtContract,
- archiveLocal: candidateBlock.archive.root.toString()
+ archiveLocal: candidateCheckpoint.archive.root.toString()
  });
- if (archiveAtContract === candidateBlock.archive.root.toString()) {
+ if (archiveAtContract === candidateCheckpoint.archive.root.toString()) {
  break;
  }
  tipAfterUnwind--;
  }
- const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind;
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
- this.log.warn(`Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest block is ${await this.getBlockNumber()}.`);
+ const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
+ this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
  }
  }
- // Retrieve L2 blocks in batches. Each batch is estimated to accommodate up to L2 'blockBatchSize' blocks,
+ // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
  // computed using the L2 block time vs the L1 block time.
  let searchStartBlock = blocksSynchedTo;
  let searchEndBlock = blocksSynchedTo;
- let lastRetrievedBlock;
- let lastL1BlockWithL2Blocks = undefined;
+ let lastRetrievedCheckpoint;
+ let lastL1BlockWithCheckpoint = undefined;
  do {
  [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
- this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
+ this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
  // TODO(md): Retrieve from blob sink then from consensus client, then from peers
- const retrievedBlocks = await retrieveBlocksFromRollup(this.rollup.getContract(), this.publicClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.log);
- if (retrievedBlocks.length === 0) {
+ const retrievedCheckpoints = await retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.blobSinkClient, searchStartBlock, searchEndBlock, this.log);
+ if (retrievedCheckpoints.length === 0) {
  // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
  // See further details in earlier comments.
- this.log.trace(`Retrieved no new L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
+ this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
  continue;
  }
- this.log.debug(`Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
- lastProcessedL1Block: retrievedBlocks[retrievedBlocks.length - 1].l1,
+ this.log.debug(`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
+ lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
  searchStartBlock,
  searchEndBlock
  });
- const publishedBlocks = await Promise.all(retrievedBlocks.map((b)=>retrievedBlockToPublishedL2Block(b)));
- const validBlocks = [];
- for (const block of publishedBlocks){
+ const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map((b)=>retrievedToPublishedCheckpoint(b)));
+ const validCheckpoints = [];
+ for (const published of publishedCheckpoints){
  const validationResult = this.config.skipValidateBlockAttestations ? {
  valid: true
- } : await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log);
- // Only update the validation result if it has changed, so we can keep track of the first invalid block
- // in case there is a sequence of more than one invalid block, as we need to invalidate the first one.
- // There is an exception though: if an invalid block is invalidated and replaced with another invalid block,
+ } : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
+ // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
+ // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
+ // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
  // we need to update the validation result, since we need to be able to invalidate the new one.
- // See test 'chain progresses if an invalid block is invalidated with an invalid one' for more info.
+ // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
  if (rollupStatus.validationResult?.valid !== validationResult.valid || !rollupStatus.validationResult.valid && !validationResult.valid && rollupStatus.validationResult.block.blockNumber === validationResult.block.blockNumber) {
  rollupStatus.validationResult = validationResult;
  }
  if (!validationResult.valid) {
- this.log.warn(`Skipping block ${block.block.number} due to invalid attestations`, {
- blockHash: block.block.hash(),
- l1BlockNumber: block.l1.blockNumber,
+ this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
+ checkpointHash: published.checkpoint.hash(),
+ l1BlockNumber: published.l1.blockNumber,
  ...pick(validationResult, 'reason')
  });
  // Emit event for invalid block detection
@@ -656,18 +659,18 @@ function mapArchiverConfig(config) {
  });
  continue;
  }
- validBlocks.push(block);
- this.log.debug(`Ingesting new L2 block ${block.block.number} with ${block.block.body.txEffects.length} txs`, {
- blockHash: block.block.hash(),
- l1BlockNumber: block.l1.blockNumber,
- ...block.block.header.globalVariables.toInspect(),
- ...block.block.getStats()
+ validCheckpoints.push(published);
+ this.log.debug(`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`, {
+ checkpointHash: published.checkpoint.hash(),
+ l1BlockNumber: published.l1.blockNumber,
+ ...published.checkpoint.header.toInspect(),
+ blocks: published.checkpoint.blocks.map((b)=>b.getStats())
  });
  }
  try {
  const updatedValidationResult = rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
- const [processDuration] = await elapsed(()=>this.store.addBlocks(validBlocks, updatedValidationResult));
- this.instrumentation.processNewBlocks(processDuration / validBlocks.length, validBlocks.map((b)=>b.block));
+ const [processDuration] = await elapsed(()=>this.addCheckpoints(validCheckpoints, updatedValidationResult));
+ this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
  } catch (err) {
  if (err instanceof InitialBlockNumberNotSequentialError) {
  const { previousBlockNumber, newBlockNumber } = err;
@@ -683,53 +686,54 @@ function mapArchiverConfig(config) {
  }
  throw err;
  }
- for (const block of validBlocks){
- this.log.info(`Downloaded L2 block ${block.block.number}`, {
- blockHash: await block.block.hash(),
- blockNumber: block.block.number,
- txCount: block.block.body.txEffects.length,
- globalVariables: block.block.header.globalVariables.toInspect(),
- archiveRoot: block.block.archive.root.toString(),
- archiveNextLeafIndex: block.block.archive.nextAvailableLeafIndex
+ for (const checkpoint of validCheckpoints){
+ this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
+ checkpointHash: checkpoint.checkpoint.hash(),
+ checkpointNumber: checkpoint.checkpoint.number,
+ blockCount: checkpoint.checkpoint.blocks.length,
+ txCount: checkpoint.checkpoint.blocks.reduce((acc, b)=>acc + b.body.txEffects.length, 0),
+ header: checkpoint.checkpoint.header.toInspect(),
+ archiveRoot: checkpoint.checkpoint.archive.root.toString(),
+ archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex
  });
  }
- lastRetrievedBlock = validBlocks.at(-1) ?? lastRetrievedBlock;
- lastL1BlockWithL2Blocks = publishedBlocks.at(-1)?.l1.blockNumber ?? lastL1BlockWithL2Blocks;
+ lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
+ lastL1BlockWithCheckpoint = publishedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
  }while (searchEndBlock < currentL1BlockNumber)
  // Important that we update AFTER inserting the blocks.
- await updateProvenBlock();
+ await updateProvenCheckpoint();
  return {
  ...rollupStatus,
- lastRetrievedBlock,
- lastL1BlockWithL2Blocks
+ lastRetrievedCheckpoint,
+ lastL1BlockWithCheckpoint
  };
  }
- async checkForNewBlocksBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
- const { lastRetrievedBlock, pendingBlockNumber } = status;
- // Compare the last L2 block we have (either retrieved in this round or loaded from store) with what the
+ async checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
+ const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
+ // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
  // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
- const latestLocalL2BlockNumber = lastRetrievedBlock?.block.number ?? await this.store.getSynchedL2BlockNumber();
- if (latestLocalL2BlockNumber < pendingBlockNumber) {
+ const latestLocalCheckpointNumber = lastRetrievedCheckpoint?.checkpoint.number ?? await this.getSynchedCheckpointNumber();
+ if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
  // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
- // but still havent reached the pending block according to the call to the rollup contract.
- // We suspect an L1 reorg that added blocks *behind* us. If that is the case, it must have happened between the
- // last L2 block we saw and the current one, so we reset the last synched L1 block number. In the edge case we
- // don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
- const latestLocalL2Block = lastRetrievedBlock ?? (latestLocalL2BlockNumber > 0 ? await this.store.getPublishedBlocks(latestLocalL2BlockNumber, 1).then(([b])=>b) : undefined);
- const targetL1BlockNumber = latestLocalL2Block?.l1.blockNumber ?? maxBigint(currentL1BlockNumber - 64n, 0n);
- const latestLocalL2BlockArchive = latestLocalL2Block?.block.archive.root.toString();
- this.log.warn(`Failed to reach L2 block ${pendingBlockNumber} at ${currentL1BlockNumber} (latest is ${latestLocalL2BlockNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
- latestLocalL2BlockNumber,
- latestLocalL2BlockArchive,
+ // but still haven't reached the pending checkpoint according to the call to the rollup contract.
+ // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
+ // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
+ // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
+ const latestLocalCheckpoint = lastRetrievedCheckpoint ?? (latestLocalCheckpointNumber > 0 ? await this.getPublishedCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c) : undefined);
+ const targetL1BlockNumber = latestLocalCheckpoint?.l1.blockNumber ?? maxBigint(currentL1BlockNumber - 64n, 0n);
+ const latestLocalCheckpointArchive = latestLocalCheckpoint?.checkpoint.archive.root.toString();
+ this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
+ latestLocalCheckpointNumber,
+ latestLocalCheckpointArchive,
  blocksSynchedTo,
  currentL1BlockNumber,
  ...status
  });
  await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
  } else {
- this.log.trace(`No new blocks behind L1 sync point to retrieve.`, {
- latestLocalL2BlockNumber,
- pendingBlockNumber
+ this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
+ latestLocalCheckpointNumber,
+ pendingCheckpointNumber
  });
  }
  }
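A hedged sketch of the fallback used by checkForNewCheckpointsBeforeL1SyncPoint when no local checkpoint is available to anchor the rollback. This is not the package's code; maxBigint is defined locally here as a trivial stand-in for the helper the archiver imports, and the example values are invented.

```ts
// Trivial local helper; the archiver imports an equivalent utility.
const maxBigint = (a: bigint, b: bigint): bigint => (a > b ? a : b);

// If we know the L1 block that carried our latest local checkpoint, rewind the
// sync point to it; otherwise rewind 64 L1 blocks (two L1 epochs, the deepest
// reorg assumed possible), clamped at block 0.
function rollbackTarget(
  lastCheckpointL1Block: bigint | undefined,
  currentL1BlockNumber: bigint,
): bigint {
  return lastCheckpointL1Block ?? maxBigint(currentL1BlockNumber - 64n, 0n);
}

// Example: no local checkpoint recorded, currently at L1 block 1000 -> resync from block 936.
console.log(rollbackTarget(undefined, 1_000n)); // 936n
```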
@@ -836,6 +840,60 @@ function mapArchiverConfig(config) {
  /** Returns whether the archiver has completed an initial sync run successfully. */ isInitialSyncComplete() {
  return this.initialSyncComplete;
  }
+ async getPublishedCheckpoints(from, limit, proven) {
+ const blocks = await this.getPublishedBlocks(from, limit, proven);
+ return blocks.map((b)=>b.toPublishedCheckpoint());
+ }
+ async getCheckpoints(from, limit, proven) {
+ const published = await this.getPublishedCheckpoints(from, limit, proven);
+ return published.map((p)=>p.checkpoint);
+ }
+ async getCheckpoint(number) {
+ if (number < 0) {
+ number = await this.getSynchedCheckpointNumber();
+ }
+ if (number === 0) {
+ return undefined;
+ }
+ const published = await this.getPublishedCheckpoints(number, 1);
+ return published[0]?.checkpoint;
+ }
+ async getCheckpointHeader(number) {
+ if (number === 'latest') {
+ number = await this.getSynchedCheckpointNumber();
+ }
+ if (number === 0) {
+ return undefined;
+ }
+ const checkpoint = await this.getCheckpoint(number);
+ return checkpoint?.header;
+ }
+ getCheckpointNumber() {
+ return this.getSynchedCheckpointNumber();
+ }
+ getSynchedCheckpointNumber() {
+ // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
+ return this.store.getSynchedL2BlockNumber();
+ }
+ getProvenCheckpointNumber() {
+ // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
+ return this.store.getProvenL2BlockNumber();
+ }
+ setProvenCheckpointNumber(checkpointNumber) {
+ // TODO: Proven checkpoint number will no longer be the same as the proven block number once we support multiple blocks per checkpoint.
+ return this.store.setProvenL2BlockNumber(checkpointNumber);
+ }
+ unwindCheckpoints(from, checkpointsToUnwind) {
+ // TODO: This only works if we have one block per checkpoint.
+ return this.store.unwindBlocks(from, checkpointsToUnwind);
+ }
+ getLastBlockNumberInCheckpoint(checkpointNumber) {
+ // TODO: Checkpoint number will no longer be the same as the block number once we support multiple blocks per checkpoint.
+ return Promise.resolve(checkpointNumber);
+ }
+ addCheckpoints(checkpoints, pendingChainValidationStatus) {
+ return this.store.addBlocks(checkpoints.map((p)=>PublishedL2Block.fromPublishedCheckpoint(p)), pendingChainValidationStatus);
+ }
  /**
  * Gets up to `limit` amount of L2 blocks starting from `from`.
  * @param from - Number of the first block to return (inclusive).
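The new helpers added in this hunk all lean on the interim assumption spelled out in their TODOs: a checkpoint currently wraps exactly one L2 block, so checkpoint numbers coincide with block numbers. A minimal, hypothetical illustration of that identity mapping (not the package's real store API beyond the two getters shown above):

```ts
// Subset of the archiver store used by the new checkpoint helpers.
interface BlockNumberStore {
  getSynchedL2BlockNumber(): Promise<number>;
  getProvenL2BlockNumber(): Promise<number>;
}

// While a checkpoint holds a single block, every checkpoint-numbered query can be
// answered by the existing block-numbered store; a real index becomes necessary
// once checkpoints may contain several blocks.
class OneBlockPerCheckpoint {
  constructor(private readonly store: BlockNumberStore) {}

  getSynchedCheckpointNumber(): Promise<number> {
    return this.store.getSynchedL2BlockNumber();
  }

  getProvenCheckpointNumber(): Promise<number> {
    return this.store.getProvenL2BlockNumber();
  }

  // The last (and only) block in a checkpoint shares the checkpoint's number.
  getLastBlockNumberInCheckpoint(checkpointNumber: number): Promise<number> {
    return Promise.resolve(checkpointNumber);
  }
}
```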
@@ -252,4 +252,4 @@ export interface ArchiverDataStore {
  /** Sets the last synced validation status of the pending chain. */
  setPendingChainValidationStatus(status: ValidateBlockResult | undefined): Promise<void>;
  }
- //# sourceMappingURL=archiver_store.d.ts.map
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYXJjaGl2ZXJfc3RvcmUuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9hcmNoaXZlci9hcmNoaXZlcl9zdG9yZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssRUFBRSxTQUFTLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQztBQUNqRCxPQUFPLEtBQUssRUFBRSxFQUFFLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQUNuRCxPQUFPLEtBQUssRUFBRSxXQUFXLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQztBQUNuRCxPQUFPLEtBQUssRUFBRSxnQkFBZ0IsRUFBRSxNQUFNLG1CQUFtQixDQUFDO0FBQzFELE9BQU8sS0FBSyxFQUFFLFlBQVksRUFBRSxNQUFNLDZCQUE2QixDQUFDO0FBQ2hFLE9BQU8sS0FBSyxFQUFFLE9BQU8sRUFBRSxtQkFBbUIsRUFBRSxNQUFNLHFCQUFxQixDQUFDO0FBQ3hFLE9BQU8sS0FBSyxFQUNWLG1CQUFtQixFQUNuQixpQ0FBaUMsRUFDakMsMkJBQTJCLEVBQzNCLDRDQUE0QyxFQUM1QyxrQ0FBa0MsRUFDbkMsTUFBTSx3QkFBd0IsQ0FBQztBQUNoQyxPQUFPLEtBQUssRUFBRSw0QkFBNEIsRUFBRSxxQkFBcUIsRUFBRSxNQUFNLGlDQUFpQyxDQUFDO0FBQzNHLE9BQU8sS0FBSyxFQUFFLFNBQVMsRUFBRSxVQUFVLEVBQUUsYUFBYSxFQUFFLE1BQU0sb0JBQW9CLENBQUM7QUFDL0UsT0FBTyxFQUFFLFdBQVcsRUFBRSxLQUFLLGVBQWUsRUFBRSxLQUFLLE1BQU0sRUFBRSxLQUFLLFNBQVMsRUFBRSxNQUFNLGtCQUFrQixDQUFDO0FBQ2xHLE9BQU8sS0FBSyxFQUFFLE1BQU0sRUFBRSxNQUFNLHFCQUFxQixDQUFDO0FBRWxELE9BQU8sS0FBSyxFQUFFLFlBQVksRUFBRSxNQUFNLDRCQUE0QixDQUFDO0FBQy9ELE9BQU8sS0FBSyxFQUFFLGdCQUFnQixFQUFFLE1BQU0sd0JBQXdCLENBQUM7QUFFL0Q7O0dBRUc7QUFDSCxNQUFNLE1BQU0sb0JBQW9CLEdBQUc7SUFDakMsdUVBQXVFO0lBQ3ZFLGVBQWUsQ0FBQyxFQUFFLE1BQU0sQ0FBQztJQUN6QixtREFBbUQ7SUFDbkQsaUJBQWlCLENBQUMsRUFBRSxTQUFTLENBQUM7Q0FDL0IsQ0FBQztBQUVGOzs7R0FHRztBQUNILE1BQU0sV0FBVyxpQkFBaUI7SUFDaEMseUZBQXlGO0lBQ3pGLGdCQUFnQixDQUFDLENBQUMsRUFBRSxRQUFRLEVBQUUsTUFBTSxPQUFPLENBQUMsQ0FBQyxDQUFDLEdBQUcsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDO0lBRTVEOzs7Ozs7T0FNRztJQUNILFNBQVMsQ0FBQyxNQUFNLEVBQUUsZ0JBQWdCLEVBQUUsRUFBRSxJQUFJLENBQUMsRUFBRTtRQUFFLEtBQUssQ0FBQyxFQUFFLE9BQU8sQ0FBQTtLQUFFLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRXBGOzs7Ozs7T0FNRztJQUNILFlBQVksQ0FBQyxJQUFJLEVBQUUsTUFBTSxFQUFFLGNBQWMsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRXJFOzs7T0FHRztJQUNILGlCQUFpQixDQUFDLE1BQU0sRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLGdCQUFnQixHQUFHLFNBQVMsQ0FBQyxDQUFDO0lBRXpFOzs7T0FHRztJQUNILHVCQUF1QixDQUFDLFNBQVMsRUFBRSxFQUFFLEdBQUcsT0FBTyxDQUFDLGdCQUFnQixHQUFHLFNBQVMsQ0FBQyxDQUFDO0lBRTlFOzs7T0FHRztJQUNILDBCQUEwQixDQUFDLE9BQU8sRUFBRSxFQUFFLEdBQUcsT0FBTyxDQUFDLGdCQUFnQixHQUFHLFNBQVMsQ0FBQyxDQUFDO0lBRS9FOzs7OztPQUtHO0lBQ0gsa0JBQWtCLENBQUMsSUFBSSxFQUFFLE1BQU0sRUFBRSxLQUFLLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxnQkFBZ0IsRUFBRSxDQUFDLENBQUM7SUFFN0U7Ozs7O09BS0c7SUFDSCxlQUFlLENBQUMsSUFBSSxFQUFFLE1BQU0sRUFBRSxLQUFLLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxXQUFXLEVBQUUsQ0FBQyxDQUFDO0lBRXJFOzs7T0FHRztJQUNILG9CQUFvQixDQUFDLFNBQVMsRUFBRSxFQUFFLEdBQUcsT0FBTyxDQUFDLFdBQVcsR0FBRyxTQUFTLENBQUMsQ0FBQztJQUV0RTs7O09BR0c7SUFDSCx1QkFBdUIsQ0FBQyxPQUFPLEVBQUUsRUFBRSxHQUFHLE9BQU8sQ0FBQyxXQUFXLEdBQUcsU0FBUyxDQUFDLENBQUM7SUFFdkU7Ozs7T0FJRztJQUNILFdBQVcsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxlQUFlLEdBQUcsU0FBUyxDQUFDLENBQUM7SUFFbEU7Ozs7T0FJRztJQUNILG1CQUFtQixDQUFDLE1BQU0sRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLFNBQVMsR0FBRyxTQUFTLENBQUMsQ0FBQztJQUVwRTs7OztPQUlHO0lBQ0gsT0FBTyxDQUFDLE1BQU0sRUFBRSxPQUFPLEVBQUUsR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDLENBQUM7SUFDN0MsVUFBVSxDQUFDLE1BQU0sRUFBRSxPQUFPLEVBQUUsR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDLENBQUM7SUFFaEQ7Ozs7T0FJRztJQUNILGlCQUFpQixDQUFDLFFBQVEsRUFBRSxZQUFZLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7SUFFM0Q7Ozs7T0FJRztJQUNILGlCQUFpQixDQUFDLFdBQVcsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7SUFFdEQ7Ozs7T0FJRztJQUNILHFCQUFxQixDQUFDLGFBQWEsRUFBRSxFQUFFLEdBQUcsT0FBTyxDQUFDLE1BQU0sR0FBRyxTQUFTLENBQUMsQ0FBQztJQUV0RTs7O09BR0c7SUF
DSCwwQkFBMEIsSUFBSSxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUM7SUFFOUM7Ozs7O09BS0c7SUFDSCxjQUFjLENBQUMsSUFBSSxFQUFFLE1BQU0sRUFBRSxLQUFLLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxVQUFVLEVBQUUsQ0FBQyxDQUFDO0lBRW5FOzs7Ozs7T0FNRztJQUNILGFBQWEsQ0FBQyxJQUFJLEVBQUUsRUFBRSxFQUFFLEVBQUUsVUFBVSxDQUFDLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxhQUFhLEVBQUUsRUFBRSxDQUFDLENBQUM7SUFFM0U7Ozs7T0FJRztJQUNILGFBQWEsQ0FBQyxNQUFNLEVBQUUsU0FBUyxHQUFHLE9BQU8sQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDO0lBRWpFOzs7O09BSUc7SUFDSCxvQkFBb0IsQ0FBQyxNQUFNLEVBQUUsU0FBUyxHQUFHLE9BQU8sQ0FBQyw0QkFBNEIsQ0FBQyxDQUFDO0lBRS9FOzs7T0FHRztJQUNILHVCQUF1QixJQUFJLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FBQztJQUUzQzs7O09BR0c7SUFDSCxzQkFBc0IsSUFBSSxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUM7SUFFMUM7OztPQUdHO0lBQ0gsc0JBQXNCLENBQUMsYUFBYSxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7SUFFN0Q7OztPQUdHO0lBQ0gsNEJBQTRCLENBQUMsYUFBYSxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7SUFFbkU7O09BRUc7SUFDSCx3QkFBd0IsQ0FBQyxPQUFPLEVBQUUsU0FBUyxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztJQUU1RDs7T0FFRztJQUNILGFBQWEsSUFBSSxPQUFPLENBQUMsb0JBQW9CLENBQUMsQ0FBQztJQUUvQzs7Ozs7T0FLRztJQUNILGtCQUFrQixDQUFDLElBQUksRUFBRSxtQkFBbUIsRUFBRSxFQUFFLG1CQUFtQixFQUFFLEVBQUUsRUFBRSxFQUFFLFdBQVcsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRWxILHFCQUFxQixDQUFDLElBQUksRUFBRSxtQkFBbUIsRUFBRSxFQUFFLFdBQVcsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRTFGLHFCQUFxQixDQUFDLGVBQWUsRUFBRSxFQUFFLEdBQUcsT0FBTyxDQUFDLEVBQUUsR0FBRyxTQUFTLENBQUMsQ0FBQztJQUVwRTs7O09BR0c7SUFDSCxnQkFBZ0IsQ0FBQyxFQUFFLEVBQUUsRUFBRSxHQUFHLE9BQU8sQ0FBQyxtQkFBbUIsR0FBRyxTQUFTLENBQUMsQ0FBQztJQUVuRTs7Ozs7T0FLRztJQUNILG9CQUFvQixDQUFDLElBQUksRUFBRSwyQkFBMkIsRUFBRSxFQUFFLFdBQVcsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBQ2pHLHVCQUF1QixDQUFDLElBQUksRUFBRSwyQkFBMkIsRUFBRSxFQUFFLFdBQVcsRUFBRSxNQUFNLEdBQUcsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRXBHOzs7OztPQUtHO0lBQ0gsMEJBQTBCLENBQUMsSUFBSSxFQUFFLGlDQUFpQyxFQUFFLEVBQUUsU0FBUyxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDLENBQUM7SUFDM0csNkJBQTZCLENBQUMsSUFBSSxFQUFFLGlDQUFpQyxFQUFFLEVBQUUsU0FBUyxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDLENBQUM7SUFDOUc7O09BRUc7SUFDSCxZQUFZLENBQ1YsZUFBZSxFQUFFLEVBQUUsRUFDbkIsZ0JBQWdCLEVBQUUsNENBQTRDLEVBQUUsRUFDaEUsZ0JBQWdCLEVBQUUsa0NBQWtDLEVBQUUsR0FDckQsT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBRXBCOzs7OztPQUtHO0lBQ0gsbUJBQW1CLENBQUMsT0FBTyxFQUFFLFlBQVksRUFBRSxTQUFTLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQywyQkFBMkIsR0FBRyxTQUFTLENBQUMsQ0FBQztJQUVoSCwrREFBK0Q7SUFDL0QsbUJBQW1CLElBQUksT0FBTyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7SUFFckMsZ0ZBQWdGO0lBQ2hGLGtDQUFrQyxDQUFDLFVBQVUsRUFBRSxNQUFNLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7SUFFeEUsd0RBQXdEO0lBQ3hELG9CQUFvQixDQUFDLE9BQU8sRUFBRSxZQUFZLEVBQUUsUUFBUSxFQUFFLGdCQUFnQixHQUFHLE9BQU8sQ0FBQyxNQUFNLEdBQUcsU0FBUyxDQUFDLENBQUM7SUFFckcsZ0RBQWdEO0lBQ2hELFlBQVksSUFBSSxPQUFPLENBQUM7UUFBRSxXQUFXLEVBQUUsTUFBTSxDQUFDO1FBQUMsZ0JBQWdCLEVBQUUsTUFBTSxDQUFDO1FBQUMsVUFBVSxFQUFFLE1BQU0sQ0FBQztRQUFDLFFBQVEsRUFBRSxNQUFNLENBQUE7S0FBRSxDQUFDLENBQUM7SUFFakgscUZBQXFGO0lBQ3JGLFFBQVEsQ0FBQyxJQUFJLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FBQztJQUV4Qyx3Q0FBd0M7SUFDeEMsS0FBSyxJQUFJLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztJQUV2QixxRkFBcUY7SUFDckYsK0JBQStCLENBQUMsaUJBQWlCLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztJQUUxRSx1RUFBdUU7SUFDdkUscUJBQXFCLENBQUMsS0FBSyxDQUFDLEVBQUUsV0FBVyxDQUFDLE1BQU0sQ0FBQyxHQUFHLHFCQUFxQixDQUFDLFlBQVksQ0FBQyxDQUFDO0lBRXhGLCtFQUErRTtJQUMvRSxvQkFBb0IsQ0FBQyxVQUFVLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztJQUV4RCxnREFBZ0Q7SUFDaEQsb0JBQW9CLElBQUksT0FBTyxDQUFDLFlBQVksR0FBRyxTQUFTLENBQUMsQ0FBQztJQUUxRC
xzRUFBc0U7SUFDdEUsK0JBQStCLElBQUksT0FBTyxDQUFDLG1CQUFtQixHQUFHLFNBQVMsQ0FBQyxDQUFDO0lBRTVFLG1FQUFtRTtJQUNuRSwrQkFBK0IsQ0FBQyxNQUFNLEVBQUUsbUJBQW1CLEdBQUcsU0FBUyxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQztDQUN6RiJ9
@@ -5,4 +5,4 @@ import type { ArchiverDataStore } from './archiver_store.js';
  * @param getStore - Returns an instance of a store that's already been initialized.
  */
  export declare function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore | Promise<ArchiverDataStore>): void;
- //# sourceMappingURL=archiver_store_test_suite.d.ts.map
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYXJjaGl2ZXJfc3RvcmVfdGVzdF9zdWl0ZS5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL2FyY2hpdmVyL2FyY2hpdmVyX3N0b3JlX3Rlc3Rfc3VpdGUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBcUNBLE9BQU8sNEJBQTRCLENBQUM7QUFJcEMsT0FBTyxLQUFLLEVBQUUsaUJBQWlCLEVBQXdCLE1BQU0scUJBQXFCLENBQUM7QUFLbkY7OztHQUdHO0FBQ0gsd0JBQWdCLHlCQUF5QixDQUN2QyxRQUFRLEVBQUUsTUFBTSxFQUNoQixRQUFRLEVBQUUsTUFBTSxpQkFBaUIsR0FBRyxPQUFPLENBQUMsaUJBQWlCLENBQUMsUUEyckMvRCJ9
@@ -1 +1 @@
- {"version":3,"file":"archiver_store_test_suite.d.ts","sourceRoot":"","sources":["../../src/archiver/archiver_store_test_suite.ts"],"names":[],"mappings":"AAoCA,OAAO,4BAA4B,CAAC;AAIpC,OAAO,KAAK,EAAE,iBAAiB,EAAwB,MAAM,qBAAqB,CAAC;AAKnF;;;GAGG;AACH,wBAAgB,yBAAyB,CACvC,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC,QA2rC/D"}
+ {"version":3,"file":"archiver_store_test_suite.d.ts","sourceRoot":"","sources":["../../src/archiver/archiver_store_test_suite.ts"],"names":[],"mappings":"AAqCA,OAAO,4BAA4B,CAAC;AAIpC,OAAO,KAAK,EAAE,iBAAiB,EAAwB,MAAM,qBAAqB,CAAC;AAKnF;;;GAGG;AACH,wBAAgB,yBAAyB,CACvC,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC,QA2rC/D"}