@aztec/archiver 0.0.0-test.1 → 0.0.1-commit.1142ef1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (151) hide show
  1. package/README.md +27 -6
  2. package/dest/archiver/archiver.d.ts +204 -94
  3. package/dest/archiver/archiver.d.ts.map +1 -1
  4. package/dest/archiver/archiver.js +1616 -414
  5. package/dest/archiver/archiver_store.d.ts +178 -83
  6. package/dest/archiver/archiver_store.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  8. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  9. package/dest/archiver/archiver_store_test_suite.js +2373 -397
  10. package/dest/archiver/config.d.ts +7 -22
  11. package/dest/archiver/config.d.ts.map +1 -1
  12. package/dest/archiver/config.js +30 -14
  13. package/dest/archiver/errors.d.ts +33 -1
  14. package/dest/archiver/errors.d.ts.map +1 -1
  15. package/dest/archiver/errors.js +49 -0
  16. package/dest/archiver/index.d.ts +3 -4
  17. package/dest/archiver/index.d.ts.map +1 -1
  18. package/dest/archiver/index.js +1 -2
  19. package/dest/archiver/instrumentation.d.ts +14 -6
  20. package/dest/archiver/instrumentation.d.ts.map +1 -1
  21. package/dest/archiver/instrumentation.js +45 -41
  22. package/dest/archiver/kv_archiver_store/block_store.d.ts +98 -21
  23. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  24. package/dest/archiver/kv_archiver_store/block_store.js +495 -86
  25. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +4 -4
  26. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  27. package/dest/archiver/kv_archiver_store/contract_class_store.js +13 -19
  28. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +12 -9
  29. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  30. package/dest/archiver/kv_archiver_store/contract_instance_store.js +30 -16
  31. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +81 -75
  32. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  33. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +145 -83
  34. package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
  35. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  36. package/dest/archiver/kv_archiver_store/log_store.js +151 -114
  37. package/dest/archiver/kv_archiver_store/message_store.d.ts +25 -18
  38. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  39. package/dest/archiver/kv_archiver_store/message_store.js +152 -49
  40. package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
  41. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
  42. package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
  43. package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
  44. package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
  45. package/dest/archiver/l1/calldata_retriever.js +471 -0
  46. package/dest/archiver/l1/data_retrieval.d.ts +90 -0
  47. package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
  48. package/dest/archiver/l1/data_retrieval.js +331 -0
  49. package/dest/archiver/l1/debug_tx.d.ts +19 -0
  50. package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
  51. package/dest/archiver/l1/debug_tx.js +73 -0
  52. package/dest/archiver/l1/spire_proposer.d.ts +70 -0
  53. package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
  54. package/dest/archiver/l1/spire_proposer.js +157 -0
  55. package/dest/archiver/l1/trace_tx.d.ts +97 -0
  56. package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
  57. package/dest/archiver/l1/trace_tx.js +91 -0
  58. package/dest/archiver/l1/types.d.ts +12 -0
  59. package/dest/archiver/l1/types.d.ts.map +1 -0
  60. package/dest/archiver/l1/types.js +3 -0
  61. package/dest/archiver/l1/validate_trace.d.ts +29 -0
  62. package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
  63. package/dest/archiver/l1/validate_trace.js +150 -0
  64. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  65. package/dest/archiver/structs/inbox_message.d.ts +15 -0
  66. package/dest/archiver/structs/inbox_message.d.ts.map +1 -0
  67. package/dest/archiver/structs/inbox_message.js +39 -0
  68. package/dest/archiver/structs/published.d.ts +2 -11
  69. package/dest/archiver/structs/published.d.ts.map +1 -1
  70. package/dest/archiver/structs/published.js +1 -1
  71. package/dest/archiver/validation.d.ts +17 -0
  72. package/dest/archiver/validation.d.ts.map +1 -0
  73. package/dest/archiver/validation.js +98 -0
  74. package/dest/factory.d.ts +9 -14
  75. package/dest/factory.d.ts.map +1 -1
  76. package/dest/factory.js +22 -52
  77. package/dest/index.d.ts +2 -2
  78. package/dest/index.d.ts.map +1 -1
  79. package/dest/index.js +1 -1
  80. package/dest/rpc/index.d.ts +2 -3
  81. package/dest/rpc/index.d.ts.map +1 -1
  82. package/dest/rpc/index.js +1 -4
  83. package/dest/test/index.d.ts +1 -1
  84. package/dest/test/mock_archiver.d.ts +16 -8
  85. package/dest/test/mock_archiver.d.ts.map +1 -1
  86. package/dest/test/mock_archiver.js +19 -14
  87. package/dest/test/mock_l1_to_l2_message_source.d.ts +9 -6
  88. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  89. package/dest/test/mock_l1_to_l2_message_source.js +30 -7
  90. package/dest/test/mock_l2_block_source.d.ts +56 -13
  91. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  92. package/dest/test/mock_l2_block_source.js +196 -25
  93. package/dest/test/mock_structs.d.ts +10 -0
  94. package/dest/test/mock_structs.d.ts.map +1 -0
  95. package/dest/test/mock_structs.js +38 -0
  96. package/package.json +29 -30
  97. package/src/archiver/archiver.ts +1596 -512
  98. package/src/archiver/archiver_store.ts +205 -88
  99. package/src/archiver/archiver_store_test_suite.ts +2386 -354
  100. package/src/archiver/config.ts +38 -46
  101. package/src/archiver/errors.ts +85 -0
  102. package/src/archiver/index.ts +2 -3
  103. package/src/archiver/instrumentation.ts +65 -45
  104. package/src/archiver/kv_archiver_store/block_store.ts +668 -101
  105. package/src/archiver/kv_archiver_store/contract_class_store.ts +14 -24
  106. package/src/archiver/kv_archiver_store/contract_instance_store.ts +36 -28
  107. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +197 -113
  108. package/src/archiver/kv_archiver_store/log_store.ts +204 -132
  109. package/src/archiver/kv_archiver_store/message_store.ts +213 -54
  110. package/src/archiver/l1/README.md +98 -0
  111. package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
  112. package/src/archiver/l1/calldata_retriever.ts +641 -0
  113. package/src/archiver/l1/data_retrieval.ts +512 -0
  114. package/src/archiver/l1/debug_tx.ts +99 -0
  115. package/src/archiver/l1/spire_proposer.ts +160 -0
  116. package/src/archiver/l1/trace_tx.ts +128 -0
  117. package/src/archiver/l1/types.ts +13 -0
  118. package/src/archiver/l1/validate_trace.ts +211 -0
  119. package/src/archiver/structs/inbox_message.ts +41 -0
  120. package/src/archiver/structs/published.ts +1 -11
  121. package/src/archiver/validation.ts +124 -0
  122. package/src/factory.ts +28 -69
  123. package/src/index.ts +1 -1
  124. package/src/rpc/index.ts +1 -5
  125. package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
  126. package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
  127. package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
  128. package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
  129. package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
  130. package/src/test/fixtures/trace_transaction-proxied.json +128 -0
  131. package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
  132. package/src/test/mock_archiver.ts +22 -16
  133. package/src/test/mock_l1_to_l2_message_source.ts +26 -8
  134. package/src/test/mock_l2_block_source.ts +254 -31
  135. package/src/test/mock_structs.ts +50 -0
  136. package/dest/archiver/data_retrieval.d.ts +0 -74
  137. package/dest/archiver/data_retrieval.d.ts.map +0 -1
  138. package/dest/archiver/data_retrieval.js +0 -283
  139. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts +0 -12
  140. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts.map +0 -1
  141. package/dest/archiver/kv_archiver_store/nullifier_store.js +0 -73
  142. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts +0 -23
  143. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts.map +0 -1
  144. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.js +0 -49
  145. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +0 -175
  146. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +0 -1
  147. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +0 -636
  148. package/src/archiver/data_retrieval.ts +0 -422
  149. package/src/archiver/kv_archiver_store/nullifier_store.ts +0 -97
  150. package/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +0 -61
  151. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +0 -801
@@ -1,42 +1,60 @@
1
- import type { BlobSinkClientInterface } from '@aztec/blob-sink/client';
2
- import { type ViemPublicClient, createEthereumChain } from '@aztec/ethereum';
1
+ import type { BlobClientInterface } from '@aztec/blob-client/client';
2
+ import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
3
+ import { EpochCache } from '@aztec/epoch-cache';
4
+ import { createEthereumChain } from '@aztec/ethereum/chain';
5
+ import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
6
+ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
7
+ import type { L1BlockId } from '@aztec/ethereum/l1-types';
8
+ import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types';
9
+ import { maxBigint } from '@aztec/foundation/bigint';
10
+ import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
11
+ import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
12
+ import { merge, pick } from '@aztec/foundation/collection';
13
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
14
  import type { EthAddress } from '@aztec/foundation/eth-address';
4
- import { Fr } from '@aztec/foundation/fields';
5
15
  import { type Logger, createLogger } from '@aztec/foundation/log';
16
+ import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise';
6
17
  import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
7
18
  import { count } from '@aztec/foundation/string';
8
- import { elapsed } from '@aztec/foundation/timer';
9
- import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
19
+ import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
20
+ import { isDefined } from '@aztec/foundation/types';
21
+ import type { CustomRange } from '@aztec/kv-store';
22
+ import { RollupAbi } from '@aztec/l1-artifacts';
10
23
  import {
11
- ContractClassRegisteredEvent,
24
+ ContractClassPublishedEvent,
12
25
  PrivateFunctionBroadcastedEvent,
13
- UnconstrainedFunctionBroadcastedEvent,
14
- } from '@aztec/protocol-contracts/class-registerer';
26
+ UtilityFunctionBroadcastedEvent,
27
+ } from '@aztec/protocol-contracts/class-registry';
15
28
  import {
16
- ContractInstanceDeployedEvent,
29
+ ContractInstancePublishedEvent,
17
30
  ContractInstanceUpdatedEvent,
18
- } from '@aztec/protocol-contracts/instance-deployer';
31
+ } from '@aztec/protocol-contracts/instance-registry';
19
32
  import type { FunctionSelector } from '@aztec/stdlib/abi';
20
33
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
21
34
  import {
22
- type InBlock,
23
- type L2Block,
24
- type L2BlockId,
35
+ type ArchiverEmitter,
36
+ type CheckpointId,
37
+ CheckpointedL2Block,
38
+ CommitteeAttestation,
39
+ GENESIS_CHECKPOINT_HEADER_HASH,
40
+ L2Block,
41
+ L2BlockNew,
42
+ type L2BlockSink,
25
43
  type L2BlockSource,
26
44
  L2BlockSourceEvents,
27
45
  type L2Tips,
28
- type NullifierWithBlockSource,
46
+ PublishedL2Block,
29
47
  } from '@aztec/stdlib/block';
48
+ import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
30
49
  import {
31
50
  type ContractClassPublic,
32
51
  type ContractDataSource,
33
52
  type ContractInstanceWithAddress,
34
53
  type ExecutablePrivateFunctionWithMembershipProof,
35
- type PublicFunction,
36
- type UnconstrainedFunctionWithMembershipProof,
54
+ type UtilityFunctionWithMembershipProof,
37
55
  computePublicBytecodeCommitment,
38
56
  isValidPrivateFunctionMembershipProof,
39
- isValidUnconstrainedFunctionMembershipProof,
57
+ isValidUtilityFunctionMembershipProof,
40
58
  } from '@aztec/stdlib/contract';
41
59
  import {
42
60
  type L1RollupConstants,
@@ -48,56 +66,117 @@ import {
48
66
  } from '@aztec/stdlib/epoch-helpers';
49
67
  import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
50
68
  import type { L2LogsSource } from '@aztec/stdlib/interfaces/server';
51
- import { ContractClassLog, type LogFilter, type PrivateLog, type PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
52
- import type { InboxLeaf, L1ToL2MessageSource } from '@aztec/stdlib/messaging';
53
- import { type BlockHeader, TxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx';
54
- import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';
69
+ import {
70
+ ContractClassLog,
71
+ type LogFilter,
72
+ type PrivateLog,
73
+ type PublicLog,
74
+ type SiloedTag,
75
+ Tag,
76
+ TxScopedL2Log,
77
+ } from '@aztec/stdlib/logs';
78
+ import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
79
+ import type { CheckpointHeader } from '@aztec/stdlib/rollup';
80
+ import { type BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx';
81
+ import type { UInt64 } from '@aztec/stdlib/types';
82
+ import {
83
+ type TelemetryClient,
84
+ type Traceable,
85
+ type Tracer,
86
+ execInSpan,
87
+ getTelemetryClient,
88
+ trackSpan,
89
+ } from '@aztec/telemetry-client';
55
90
 
56
91
  import { EventEmitter } from 'events';
57
92
  import groupBy from 'lodash.groupby';
58
- import { type GetContractReturnType, createPublicClient, fallback, getContract, http } from 'viem';
93
+ import { type GetContractReturnType, type Hex, createPublicClient, fallback, http } from 'viem';
59
94
 
60
95
  import type { ArchiverDataStore, ArchiverL1SynchPoint } from './archiver_store.js';
61
96
  import type { ArchiverConfig } from './config.js';
62
- import { retrieveBlocksFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js';
63
- import { NoBlobBodiesFoundError } from './errors.js';
97
+ import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
64
98
  import { ArchiverInstrumentation } from './instrumentation.js';
65
- import type { DataRetrieval } from './structs/data_retrieval.js';
66
- import type { L1Published } from './structs/published.js';
99
+ import type { CheckpointData } from './kv_archiver_store/block_store.js';
100
+ import {
101
+ retrieveCheckpointsFromRollup,
102
+ retrieveL1ToL2Message,
103
+ retrieveL1ToL2Messages,
104
+ retrievedToPublishedCheckpoint,
105
+ } from './l1/data_retrieval.js';
106
+ import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
107
+ import type { InboxMessage } from './structs/inbox_message.js';
108
+ import { type ValidateCheckpointResult, validateCheckpointAttestations } from './validation.js';
67
109
 
68
110
  /**
69
111
  * Helper interface to combine all sources this archiver implementation provides.
70
112
  */
71
- export type ArchiveSource = L2BlockSource &
72
- L2LogsSource &
73
- ContractDataSource &
74
- L1ToL2MessageSource &
75
- NullifierWithBlockSource;
113
+ export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource;
114
+
115
+ /** Request to add a block to the archiver, queued for processing by the sync loop. */
116
+ type AddBlockRequest = {
117
+ block: L2BlockNew;
118
+ resolve: () => void;
119
+ reject: (err: Error) => void;
120
+ };
121
+
122
+ export type ArchiverDeps = {
123
+ telemetry?: TelemetryClient;
124
+ blobClient: BlobClientInterface;
125
+ epochCache?: EpochCache;
126
+ dateProvider?: DateProvider;
127
+ };
128
+
129
+ function mapArchiverConfig(config: Partial<ArchiverConfig>) {
130
+ return {
131
+ pollingIntervalMs: config.archiverPollingIntervalMS,
132
+ batchSize: config.archiverBatchSize,
133
+ skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations,
134
+ maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
135
+ ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts,
136
+ };
137
+ }
138
+
139
+ type RollupStatus = {
140
+ provenCheckpointNumber: CheckpointNumber;
141
+ provenArchive: Hex;
142
+ pendingCheckpointNumber: CheckpointNumber;
143
+ pendingArchive: Hex;
144
+ validationResult: ValidateCheckpointResult | undefined;
145
+ lastRetrievedCheckpoint?: PublishedCheckpoint;
146
+ lastL1BlockWithCheckpoint?: bigint;
147
+ };
76
148
 
77
149
  /**
78
- * Pulls L2 blocks in a non-blocking manner and provides interface for their retrieval.
150
+ * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
79
151
  * Responsible for handling robust L1 polling so that other components do not need to
80
152
  * concern themselves with it.
81
153
  */
82
- export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
83
- /**
84
- * A promise in which we will be continually fetching new L2 blocks.
85
- */
86
- private runningPromise?: RunningPromise;
154
+ export class Archiver
155
+ extends (EventEmitter as new () => ArchiverEmitter)
156
+ implements ArchiveSource, L2BlockSink, Traceable
157
+ {
158
+ /** A loop in which we will be continually fetching new checkpoints. */
159
+ private runningPromise: RunningPromise;
87
160
 
88
- private rollup: GetContractReturnType<typeof RollupAbi, ViemPublicClient>;
89
- private inbox: GetContractReturnType<typeof InboxAbi, ViemPublicClient>;
161
+ private rollup: RollupContract;
162
+ private inbox: InboxContract;
90
163
 
91
164
  private store: ArchiverStoreHelper;
92
165
 
93
- public l1BlockNumber: bigint | undefined;
94
- public l1Timestamp: bigint | undefined;
166
+ private l1BlockNumber: bigint | undefined;
167
+ private l1Timestamp: bigint | undefined;
168
+ private initialSyncComplete: boolean = false;
169
+ private initialSyncPromise: PromiseWithResolvers<void>;
170
+
171
+ /** Queue of blocks to be added to the store, processed by the sync loop. */
172
+ private blockQueue: AddBlockRequest[] = [];
95
173
 
96
174
  public readonly tracer: Tracer;
97
175
 
98
176
  /**
99
177
  * Creates a new instance of the Archiver.
100
178
  * @param publicClient - A client for interacting with the Ethereum node.
179
+ * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
101
180
  * @param rollupAddress - Ethereum address of the rollup contract.
102
181
  * @param inboxAddress - Ethereum address of the inbox contract.
103
182
  * @param registryAddress - Ethereum address of the registry contract.
@@ -107,12 +186,24 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
107
186
  */
108
187
  constructor(
109
188
  private readonly publicClient: ViemPublicClient,
110
- private readonly l1Addresses: { rollupAddress: EthAddress; inboxAddress: EthAddress; registryAddress: EthAddress },
189
+ private readonly debugClient: ViemPublicDebugClient,
190
+ private readonly l1Addresses: Pick<
191
+ L1ContractAddresses,
192
+ 'rollupAddress' | 'inboxAddress' | 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress'
193
+ > & { slashingProposerAddress: EthAddress },
111
194
  readonly dataStore: ArchiverDataStore,
112
- private readonly config: { pollingIntervalMs: number; batchSize: number },
113
- private readonly blobSinkClient: BlobSinkClientInterface,
195
+ private config: {
196
+ pollingIntervalMs: number;
197
+ batchSize: number;
198
+ skipValidateCheckpointAttestations?: boolean;
199
+ maxAllowedEthClientDriftSeconds: number;
200
+ ethereumAllowNoDebugHosts?: boolean;
201
+ },
202
+ private readonly blobClient: BlobClientInterface,
203
+ private readonly epochCache: EpochCache,
204
+ private readonly dateProvider: DateProvider,
114
205
  private readonly instrumentation: ArchiverInstrumentation,
115
- private readonly l1constants: L1RollupConstants,
206
+ private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr },
116
207
  private readonly log: Logger = createLogger('archiver'),
117
208
  ) {
118
209
  super();
@@ -120,17 +211,18 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
120
211
  this.tracer = instrumentation.tracer;
121
212
  this.store = new ArchiverStoreHelper(dataStore);
122
213
 
123
- this.rollup = getContract({
124
- address: l1Addresses.rollupAddress.toString(),
125
- abi: RollupAbi,
126
- client: publicClient,
127
- });
214
+ this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
215
+ this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
216
+ this.initialSyncPromise = promiseWithResolvers();
128
217
 
129
- this.inbox = getContract({
130
- address: l1Addresses.inboxAddress.toString(),
131
- abi: InboxAbi,
132
- client: publicClient,
133
- });
218
+ // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
219
+ // are done as fast as possible. This then gets updated once the initial sync completes.
220
+ this.runningPromise = new RunningPromise(
221
+ () => this.sync(),
222
+ this.log,
223
+ this.config.pollingIntervalMs / 10,
224
+ makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError),
225
+ );
134
226
  }
135
227
 
136
228
  /**
@@ -143,86 +235,185 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
143
235
  public static async createAndSync(
144
236
  config: ArchiverConfig,
145
237
  archiverStore: ArchiverDataStore,
146
- deps: { telemetry: TelemetryClient; blobSinkClient: BlobSinkClientInterface },
238
+ deps: ArchiverDeps,
147
239
  blockUntilSynced = true,
148
240
  ): Promise<Archiver> {
149
241
  const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
150
242
  const publicClient = createPublicClient({
151
243
  chain: chain.chainInfo,
152
- transport: fallback(config.l1RpcUrls.map(url => http(url))),
244
+ transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))),
153
245
  pollingInterval: config.viemPollingIntervalMS,
154
246
  });
155
247
 
156
- const rollup = getContract({
157
- address: config.l1Contracts.rollupAddress.toString(),
158
- abi: RollupAbi,
159
- client: publicClient,
160
- });
248
+ // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
249
+ const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
250
+ const debugClient = createPublicClient({
251
+ chain: chain.chainInfo,
252
+ transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))),
253
+ pollingInterval: config.viemPollingIntervalMS,
254
+ }) as ViemPublicDebugClient;
161
255
 
162
- const [l1StartBlock, l1GenesisTime] = await Promise.all([
163
- rollup.read.L1_BLOCK_AT_GENESIS(),
164
- rollup.read.getGenesisTime(),
165
- ] as const);
256
+ const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
257
+
258
+ const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] =
259
+ await Promise.all([
260
+ rollup.getL1StartBlock(),
261
+ rollup.getL1GenesisTime(),
262
+ rollup.getProofSubmissionEpochs(),
263
+ rollup.getGenesisArchiveTreeRoot(),
264
+ rollup.getSlashingProposerAddress(),
265
+ ] as const);
266
+
267
+ const l1StartBlockHash = await publicClient
268
+ .getBlock({ blockNumber: l1StartBlock, includeTransactions: false })
269
+ .then(block => Buffer32.fromString(block.hash));
166
270
 
167
271
  const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config;
168
272
 
273
+ const l1Constants = {
274
+ l1StartBlockHash,
275
+ l1StartBlock,
276
+ l1GenesisTime,
277
+ epochDuration,
278
+ slotDuration,
279
+ ethereumSlotDuration,
280
+ proofSubmissionEpochs: Number(proofSubmissionEpochs),
281
+ genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()),
282
+ };
283
+
284
+ const opts = merge(
285
+ {
286
+ pollingIntervalMs: 10_000,
287
+ batchSize: 100,
288
+ maxAllowedEthClientDriftSeconds: 300,
289
+ ethereumAllowNoDebugHosts: false,
290
+ },
291
+ mapArchiverConfig(config),
292
+ );
293
+
294
+ const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps));
295
+ const telemetry = deps.telemetry ?? getTelemetryClient();
296
+
169
297
  const archiver = new Archiver(
170
298
  publicClient,
171
- config.l1Contracts,
299
+ debugClient,
300
+ { ...config.l1Contracts, slashingProposerAddress },
172
301
  archiverStore,
173
- {
174
- pollingIntervalMs: config.archiverPollingIntervalMS ?? 10_000,
175
- batchSize: config.archiverBatchSize ?? 100,
176
- },
177
- deps.blobSinkClient,
178
- await ArchiverInstrumentation.new(deps.telemetry, () => archiverStore.estimateSize()),
179
- { l1StartBlock, l1GenesisTime, epochDuration, slotDuration, ethereumSlotDuration },
302
+ opts,
303
+ deps.blobClient,
304
+ epochCache,
305
+ deps.dateProvider ?? new DateProvider(),
306
+ await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()),
307
+ l1Constants,
180
308
  );
181
309
  await archiver.start(blockUntilSynced);
182
310
  return archiver;
183
311
  }
184
312
 
313
+ /** Updates archiver config */
314
+ public updateConfig(newConfig: Partial<ArchiverConfig>) {
315
+ this.config = merge(this.config, mapArchiverConfig(newConfig));
316
+ }
317
+
185
318
  /**
186
319
  * Starts sync process.
187
320
  * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
188
321
  */
189
322
  public async start(blockUntilSynced: boolean): Promise<void> {
190
- if (this.runningPromise) {
323
+ if (this.runningPromise.isRunning()) {
191
324
  throw new Error('Archiver is already running');
192
325
  }
193
326
 
194
- if (blockUntilSynced) {
195
- await this.syncSafe(blockUntilSynced);
196
- }
327
+ await this.blobClient.testSources();
328
+ await this.testEthereumNodeSynced();
329
+ await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
197
330
 
198
- this.runningPromise = new RunningPromise(
199
- () => this.sync(false),
200
- this.log,
201
- this.config.pollingIntervalMs,
202
- makeLoggingErrorHandler(
203
- this.log,
204
- // Ignored errors will not log to the console
205
- // We ignore NoBlobBodiesFound as the message may not have been passed to the blob sink yet
206
- NoBlobBodiesFoundError,
207
- ),
331
+ // Log initial state for the archiver
332
+ const { l1StartBlock } = this.l1constants;
333
+ const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
334
+ const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
335
+ this.log.info(
336
+ `Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`,
337
+ { blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint },
208
338
  );
209
339
 
340
+ // Start sync loop, and return the wait for initial sync if we are asked to block until synced
210
341
  this.runningPromise.start();
342
+ if (blockUntilSynced) {
343
+ return this.waitForInitialSync();
344
+ }
211
345
  }
212
346
 
213
- private async syncSafe(initialRun: boolean) {
214
- try {
215
- await this.sync(initialRun);
216
- } catch (error) {
217
- this.log.error('Error during sync', { error });
218
- }
347
+ public syncImmediate() {
348
+ return this.runningPromise.trigger();
219
349
  }
220
350
 
221
351
  /**
222
- * Fetches logs from L1 contracts and processes them.
352
+ * Queues a block to be added to the archiver store and triggers processing.
353
+ * The block will be processed by the sync loop.
354
+ * Implements the L2BlockSink interface.
355
+ * @param block - The L2 block to add.
356
+ * @returns A promise that resolves when the block has been added to the store, or rejects on error.
357
+ */
358
+ public addBlock(block: L2BlockNew): Promise<void> {
359
+ return new Promise<void>((resolve, reject) => {
360
+ this.blockQueue.push({ block, resolve, reject });
361
+ this.log.debug(`Queued block ${block.number} for processing`);
362
+ // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
363
+ this.syncImmediate().catch(err => {
364
+ this.log.error(`Sync immediate call failed: ${err}`);
365
+ });
366
+ });
367
+ }
368
+
369
+ /**
370
+ * Processes all queued blocks, adding them to the store.
371
+ * Called at the beginning of each sync iteration.
372
+ * Blocks are processed in the order they were queued.
223
373
  */
224
- @trackSpan('Archiver.sync', initialRun => ({ [Attributes.INITIAL_SYNC]: initialRun }))
225
- private async sync(initialRun: boolean) {
374
+ private async processQueuedBlocks(): Promise<void> {
375
+ if (this.blockQueue.length === 0) {
376
+ return;
377
+ }
378
+
379
+ // Take all blocks from the queue
380
+ const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
381
+ this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
382
+
383
+ // Process each block individually to properly resolve/reject each promise
384
+ for (const { block, resolve, reject } of queuedItems) {
385
+ try {
386
+ await this.store.addBlocks([block]);
387
+ this.log.debug(`Added block ${block.number} to store`);
388
+ resolve();
389
+ } catch (err: any) {
390
+ this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
391
+ reject(err);
392
+ }
393
+ }
394
+ }
395
+
396
+ public waitForInitialSync() {
397
+ return this.initialSyncPromise.promise;
398
+ }
399
+
400
+ /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */
401
+ private async testEthereumNodeSynced() {
402
+ const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
403
+ if (maxAllowedDelay === 0) {
404
+ return;
405
+ }
406
+ const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({ includeTransactions: false });
407
+ const currentTime = BigInt(this.dateProvider.nowInSeconds());
408
+ if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
409
+ throw new Error(
410
+ `Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`,
411
+ );
412
+ }
413
+ }
414
+
415
+ @trackSpan('Archiver.syncFromL1')
416
+ private async syncFromL1() {
226
417
  /**
227
418
  * We keep track of three "pointers" to L1 blocks:
228
419
  * 1. the last L1 block that published an L2 block
@@ -232,21 +423,23 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
232
423
  * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
233
424
  * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
234
425
  * The archiver will stay back, until there's data on L1 that will move the pointers forward.
235
- *
236
- * This code does not handle reorgs.
237
426
  */
238
- const { l1StartBlock } = this.l1constants;
239
- const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
240
- const currentL1BlockNumber = await this.publicClient.getBlockNumber();
241
-
242
- if (initialRun) {
243
- this.log.info(
244
- `Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${Math.min(
245
- Number(blocksSynchedTo),
246
- Number(messagesSynchedTo),
247
- )} to current L1 block ${currentL1BlockNumber}`,
248
- );
249
- }
427
+ const { l1StartBlock, l1StartBlockHash } = this.l1constants;
428
+ const {
429
+ blocksSynchedTo = l1StartBlock,
430
+ messagesSynchedTo = { l1BlockNumber: l1StartBlock, l1BlockHash: l1StartBlockHash },
431
+ } = await this.store.getSynchPoint();
432
+
433
+ const currentL1Block = await this.publicClient.getBlock({ includeTransactions: false });
434
+ const currentL1BlockNumber = currentL1Block.number;
435
+ const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
436
+
437
+ this.log.trace(`Starting new archiver sync iteration`, {
438
+ blocksSynchedTo,
439
+ messagesSynchedTo,
440
+ currentL1BlockNumber,
441
+ currentL1BlockHash,
442
+ });
250
443
 
251
444
  // ********** Ensuring Consistency of data pulled from L1 **********
252
445
 
@@ -268,75 +461,163 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
268
461
  */
269
462
 
270
463
  // ********** Events that are processed per L1 block **********
271
- await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber);
272
-
273
- // Store latest l1 block number and timestamp seen. Used for epoch and slots calculations.
274
- if (!this.l1BlockNumber || this.l1BlockNumber < currentL1BlockNumber) {
275
- this.l1Timestamp = (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp;
276
- this.l1BlockNumber = currentL1BlockNumber;
464
+ await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber, currentL1BlockHash);
465
+
466
+ // Get L1 timestamp for the current block
467
+ const currentL1Timestamp =
468
+ !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber
469
+ ? (await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber })).timestamp
470
+ : this.l1Timestamp;
471
+
472
+ // Warn if the latest L1 block timestamp is too old
473
+ const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
474
+ const now = this.dateProvider.nowInSeconds();
475
+ if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
476
+ this.log.warn(
477
+ `Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`,
478
+ { currentL1BlockNumber, currentL1Timestamp, now, maxAllowedDelay },
479
+ );
277
480
  }
278
481
 
279
- // ********** Events that are processed per L2 block **********
482
+ // ********** Events that are processed per checkpoint **********
280
483
  if (currentL1BlockNumber > blocksSynchedTo) {
281
- // First we retrieve new L2 blocks
282
- const { provenBlockNumber } = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber);
283
- // And then we prune the current epoch if it'd reorg on next submission.
284
- // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
285
- // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of
484
+ // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
485
+ // pending chain validation status, proven checkpoint number, and synched L1 block number.
486
+ const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
487
+ // Then we prune the current epoch if it'd reorg on next submission.
488
+ // Note that we don't do this before retrieving checkpoints because we may need to retrieve
489
+ // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
286
490
  // the chain locally before we start unwinding stuff. This can be optimized by figuring out
287
- // up to which point we're pruning, and then requesting L2 blocks up to that point only.
288
- await this.handleEpochPrune(provenBlockNumber, currentL1BlockNumber);
491
+ // up to which point we're pruning, and then requesting checkpoints up to that point only.
492
+ const { rollupCanPrune } = await this.handleEpochPrune(
493
+ rollupStatus.provenCheckpointNumber,
494
+ currentL1BlockNumber,
495
+ currentL1Timestamp,
496
+ );
497
+
498
+ // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
499
+ // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
500
+ // we get a valid checkpoint to advance the syncpoint.
501
+ if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
502
+ await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
503
+ }
504
+
505
+ // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
506
+ // We only do this if rollup cant prune on the next submission. Otherwise we will end up
507
+ // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
508
+ // since the archiver will rightfully refuse to sync up to it.
509
+ if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
510
+ await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
511
+ }
289
512
 
290
513
  this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
291
514
  }
292
515
 
293
- if (initialRun) {
294
- this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete.`, {
516
+ // After syncing has completed, update the current l1 block number and timestamp,
517
+ // otherwise we risk announcing to the world that we've synced to a given point,
518
+ // but the corresponding blocks have not been processed (see #12631).
519
+ this.l1Timestamp = currentL1Timestamp;
520
+ this.l1BlockNumber = currentL1BlockNumber;
521
+
522
+ // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
523
+ // so if the initial sync took too long, we still go for another iteration.
524
+ if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= (await this.publicClient.getBlockNumber())) {
525
+ this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
295
526
  l1BlockNumber: currentL1BlockNumber,
296
527
  syncPoint: await this.store.getSynchPoint(),
297
528
  ...(await this.getL2Tips()),
298
529
  });
530
+ this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
531
+ this.initialSyncComplete = true;
532
+ this.initialSyncPromise.resolve();
299
533
  }
300
534
  }
301
535
 
302
- /** Queries the rollup contract on whether a prune can be executed on the immediatenext L1 block. */
303
- private async canPrune(currentL1BlockNumber: bigint) {
304
- const time = (this.l1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
305
- return await this.rollup.read.canPruneAtTime([time], { blockNumber: currentL1BlockNumber });
536
+ /**
537
+ * Fetches logs from L1 contracts and processes them.
538
+ */
539
+ @trackSpan('Archiver.sync')
540
+ private async sync() {
541
+ // Process any queued blocks first, before doing L1 sync
542
+ await this.processQueuedBlocks();
543
+ // Now perform L1 sync
544
+ await this.syncFromL1();
545
+ }
546
+
547
+ /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */
548
+ private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint) {
549
+ const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
550
+ const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber });
551
+ if (result) {
552
+ this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
553
+ currentL1Timestamp,
554
+ pruneTime: time,
555
+ currentL1BlockNumber,
556
+ });
557
+ }
558
+ return result;
306
559
  }
307
560
 
308
- /** Checks if there'd be a reorg for the next block submission and start pruning now. */
309
- private async handleEpochPrune(provenBlockNumber: bigint, currentL1BlockNumber: bigint) {
310
- const localPendingBlockNumber = BigInt(await this.getBlockNumber());
311
- const canPrune = localPendingBlockNumber > provenBlockNumber && (await this.canPrune(currentL1BlockNumber));
561
+ /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */
562
+ @trackSpan('Archiver.handleEpochPrune')
563
+ private async handleEpochPrune(
564
+ provenCheckpointNumber: CheckpointNumber,
565
+ currentL1BlockNumber: bigint,
566
+ currentL1Timestamp: bigint,
567
+ ) {
568
+ const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
569
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
570
+ const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
312
571
 
313
572
  if (canPrune) {
314
- const localPendingSlotNumber = await this.getL2SlotNumber();
315
- const localPendingEpochNumber = getEpochAtSlot(localPendingSlotNumber, this.l1constants);
573
+ const timer = new Timer();
574
+ const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);
575
+
576
+ const header = await this.getCheckpointHeader(pruneFrom);
577
+ if (header === undefined) {
578
+ throw new Error(`Missing checkpoint header ${pruneFrom}`);
579
+ }
580
+
581
+ const pruneFromSlotNumber = header.slotNumber;
582
+ const pruneFromEpochNumber: EpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
583
+
584
+ const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
585
+
586
+ const checkpointPromises = Array.from({ length: checkpointsToUnwind })
587
+ .fill(0)
588
+ .map((_, i) => this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
589
+ const checkpoints = await Promise.all(checkpointPromises);
590
+
591
+ const blockPromises = await Promise.all(
592
+ checkpoints
593
+ .filter(isDefined)
594
+ .map(cp => this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))),
595
+ );
596
+ const newBlocks = blockPromises.filter(isDefined).flat();
316
597
 
317
598
  // Emit an event for listening services to react to the chain prune
318
599
  this.emit(L2BlockSourceEvents.L2PruneDetected, {
319
600
  type: L2BlockSourceEvents.L2PruneDetected,
320
- blockNumber: localPendingBlockNumber,
321
- slotNumber: localPendingSlotNumber,
322
- epochNumber: localPendingEpochNumber,
601
+ epochNumber: pruneFromEpochNumber,
602
+ blocks: newBlocks,
323
603
  });
324
604
 
325
- const blocksToUnwind = localPendingBlockNumber - provenBlockNumber;
326
605
  this.log.debug(
327
- `L2 prune from ${provenBlockNumber + 1n} to ${localPendingBlockNumber} will occur on next block submission.`,
606
+ `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`,
328
607
  );
329
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
608
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
330
609
  this.log.warn(
331
- `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` +
332
- `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
333
- `Updated L2 latest block is ${await this.getBlockNumber()}.`,
610
+ `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
611
+ `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` +
612
+ `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
334
613
  );
335
- this.instrumentation.processPrune();
614
+ this.instrumentation.processPrune(timer.ms());
336
615
  // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
337
616
  // Seems like the next iteration should handle this.
338
617
  // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
339
618
  }
619
+
620
+ return { rollupCanPrune };
340
621
  }
341
622
 
342
623
  private nextRange(end: bigint, limit: bigint): [bigint, bigint] {
@@ -349,196 +630,558 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
349
630
  return [nextStart, nextEnd];
350
631
  }
351
632
 
352
- private async handleL1ToL2Messages(messagesSynchedTo: bigint, currentL1BlockNumber: bigint) {
353
- this.log.trace(`Handling L1 to L2 messages from ${messagesSynchedTo} to ${currentL1BlockNumber}.`);
354
- if (currentL1BlockNumber <= messagesSynchedTo) {
633
+ @trackSpan('Archiver.handleL1ToL2Messages')
634
+ private async handleL1ToL2Messages(
635
+ messagesSyncPoint: L1BlockId,
636
+ currentL1BlockNumber: bigint,
637
+ _currentL1BlockHash: Buffer32,
638
+ ) {
639
+ this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
640
+ if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
355
641
  return;
356
642
  }
357
643
 
358
- const localTotalMessageCount = await this.store.getTotalL1ToL2MessageCount();
359
- const destinationTotalMessageCount = await this.inbox.read.totalMessagesInserted();
644
+ // Load remote and local inbox states.
645
+ const localMessagesInserted = await this.store.getTotalL1ToL2MessageCount();
646
+ const localLastMessage = await this.store.getLastL1ToL2Message();
647
+ const remoteMessagesState = await this.inbox.getState({ blockNumber: currentL1BlockNumber });
648
+
649
+ this.log.trace(`Retrieved remote inbox state at L1 block ${currentL1BlockNumber}.`, {
650
+ localMessagesInserted,
651
+ localLastMessage,
652
+ remoteMessagesState,
653
+ });
360
654
 
361
- if (localTotalMessageCount === destinationTotalMessageCount) {
362
- await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber);
655
+ // Compare message count and rolling hash. If they match, no need to retrieve anything.
656
+ if (
657
+ remoteMessagesState.totalMessagesInserted === localMessagesInserted &&
658
+ remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)
659
+ ) {
363
660
  this.log.trace(
364
- `Retrieved no new L1 to L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`,
661
+ `No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`,
365
662
  );
366
663
  return;
367
664
  }
368
665
 
369
- // Retrieve messages in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
370
- let searchStartBlock: bigint = messagesSynchedTo;
371
- let searchEndBlock: bigint = messagesSynchedTo;
666
+ // Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
667
+ // Note that we need to fetch it from logs and not from inbox state at the syncpoint l1 block number, since it
668
+ // could be older than 128 blocks and non-archive nodes cannot resolve it.
669
+ if (localLastMessage) {
670
+ const remoteLastMessage = await this.retrieveL1ToL2Message(localLastMessage.leaf);
671
+ this.log.trace(`Retrieved remote message for local last`, { remoteLastMessage, localLastMessage });
672
+ if (!remoteLastMessage || !remoteLastMessage.rollingHash.equals(localLastMessage.rollingHash)) {
673
+ this.log.warn(`Rolling back L1 to L2 messages due to hash mismatch or msg not found.`, {
674
+ remoteLastMessage,
675
+ messagesSyncPoint,
676
+ localLastMessage,
677
+ });
678
+
679
+ messagesSyncPoint = await this.rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint);
680
+ this.log.debug(`Rolled back L1 to L2 messages to L1 block ${messagesSyncPoint.l1BlockNumber}.`, {
681
+ messagesSyncPoint,
682
+ });
683
+ }
684
+ }
685
+
686
+ // Retrieve and save messages in batches. Each batch is estimated to accommodate up to L2 'blockBatchSize' blocks,
687
+ let searchStartBlock: bigint = 0n;
688
+ let searchEndBlock: bigint = messagesSyncPoint.l1BlockNumber;
689
+
690
+ let lastMessage: InboxMessage | undefined;
691
+ let messageCount = 0;
692
+
372
693
  do {
373
694
  [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
374
695
  this.log.trace(`Retrieving L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`);
375
- const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock);
696
+ const messages = await retrieveL1ToL2Messages(this.inbox.getContract(), searchStartBlock, searchEndBlock);
376
697
  this.log.verbose(
377
- `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
698
+ `Retrieved ${messages.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`,
378
699
  );
379
- await this.store.addL1ToL2Messages(retrievedL1ToL2Messages);
380
- for (const msg of retrievedL1ToL2Messages.retrievedData) {
381
- this.log.debug(`Downloaded L1 to L2 message`, { leaf: msg.leaf.toString(), index: msg.index });
700
+ const timer = new Timer();
701
+ await this.store.addL1ToL2Messages(messages);
702
+ const perMsg = timer.ms() / messages.length;
703
+ this.instrumentation.processNewMessages(messages.length, perMsg);
704
+ for (const msg of messages) {
705
+ this.log.debug(`Downloaded L1 to L2 message`, { ...msg, leaf: msg.leaf.toString() });
706
+ lastMessage = msg;
707
+ messageCount++;
382
708
  }
383
709
  } while (searchEndBlock < currentL1BlockNumber);
710
+
711
+ // Log stats for messages retrieved (if any).
712
+ if (messageCount > 0) {
713
+ this.log.info(
714
+ `Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`,
715
+ { lastMessage, messageCount },
716
+ );
717
+ }
718
+
719
+ // Warn if the resulting rolling hash does not match the remote state we had retrieved.
720
+ if (lastMessage && !lastMessage.rollingHash.equals(remoteMessagesState.messagesRollingHash)) {
721
+ this.log.warn(`Last message retrieved rolling hash does not match remote state.`, {
722
+ lastMessage,
723
+ remoteMessagesState,
724
+ });
725
+ }
384
726
  }
385
727
 
386
- private async handleL2blocks(
387
- blocksSynchedTo: bigint,
388
- currentL1BlockNumber: bigint,
389
- ): Promise<{ provenBlockNumber: bigint }> {
390
- const localPendingBlockNumber = BigInt(await this.getBlockNumber());
391
- const [provenBlockNumber, provenArchive, pendingBlockNumber, pendingArchive, archiveForLocalPendingBlockNumber] =
392
- await this.rollup.read.status([localPendingBlockNumber], { blockNumber: currentL1BlockNumber });
393
-
394
- const updateProvenBlock = async () => {
395
- const localBlockForDestinationProvenBlockNumber = await this.getBlock(Number(provenBlockNumber));
396
-
397
- // Sanity check. I've hit what seems to be a state where the proven block is set to a value greater than the latest
398
- // synched block when requesting L2Tips from the archiver. This is the only place where the proven block is set.
399
- const synched = await this.store.getSynchedL2BlockNumber();
400
- if (localBlockForDestinationProvenBlockNumber && synched < localBlockForDestinationProvenBlockNumber?.number) {
728
+ private async retrieveL1ToL2Message(leaf: Fr): Promise<InboxMessage | undefined> {
729
+ const currentL1BlockNumber = await this.publicClient.getBlockNumber();
730
+ let searchStartBlock: bigint = 0n;
731
+ let searchEndBlock: bigint = this.l1constants.l1StartBlock - 1n;
732
+
733
+ do {
734
+ [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
735
+
736
+ const message = await retrieveL1ToL2Message(this.inbox.getContract(), leaf, searchStartBlock, searchEndBlock);
737
+
738
+ if (message) {
739
+ return message;
740
+ }
741
+ } while (searchEndBlock < currentL1BlockNumber);
742
+
743
+ return undefined;
744
+ }
745
+
746
+ private async rollbackL1ToL2Messages(localLastMessage: InboxMessage, messagesSyncPoint: L1BlockId) {
747
+ // Slowly go back through our messages until we find the last common message.
748
+ // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this
749
+ // is a very rare case, so it's fine to query one log at a time.
750
+ let commonMsg: undefined | InboxMessage;
751
+ this.log.verbose(`Searching most recent common L1 to L2 message at or before index ${localLastMessage.index}`);
752
+ for await (const msg of this.store.iterateL1ToL2Messages({ reverse: true, end: localLastMessage.index })) {
753
+ const remoteMsg = await this.retrieveL1ToL2Message(msg.leaf);
754
+ const logCtx = { remoteMsg, localMsg: msg };
755
+ if (remoteMsg && remoteMsg.rollingHash.equals(msg.rollingHash)) {
756
+ this.log.verbose(
757
+ `Found most recent common L1 to L2 message at index ${msg.index} on L1 block ${msg.l1BlockNumber}`,
758
+ logCtx,
759
+ );
760
+ commonMsg = remoteMsg;
761
+ break;
762
+ } else if (remoteMsg) {
763
+ this.log.debug(`Local L1 to L2 message with index ${msg.index} has different rolling hash`, logCtx);
764
+ } else {
765
+ this.log.debug(`Local L1 to L2 message with index ${msg.index} not found on L1`, logCtx);
766
+ }
767
+ }
768
+
769
+ // Delete everything after the common message we found.
770
+ const lastGoodIndex = commonMsg?.index;
771
+ this.log.warn(`Deleting all local L1 to L2 messages after index ${lastGoodIndex ?? 'undefined'}`);
772
+ await this.store.removeL1ToL2Messages(lastGoodIndex !== undefined ? lastGoodIndex + 1n : 0n);
773
+
774
+ // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
775
+ // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
776
+ // after the last common message.
777
+ const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1constants.l1StartBlock;
778
+ const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
779
+ messagesSyncPoint = { l1BlockNumber: syncPointL1BlockNumber, l1BlockHash: syncPointL1BlockHash };
780
+ await this.store.setMessageSynchedL1Block(messagesSyncPoint);
781
+ return messagesSyncPoint;
782
+ }
783
+
784
+ private async getL1BlockHash(l1BlockNumber: bigint): Promise<Buffer32> {
785
+ const block = await this.publicClient.getBlock({ blockNumber: l1BlockNumber, includeTransactions: false });
786
+ if (!block) {
787
+ throw new Error(`Missing L1 block ${l1BlockNumber}`);
788
+ }
789
+ return Buffer32.fromString(block.hash);
790
+ }
791
+
792
+ @trackSpan('Archiver.handleCheckpoints')
793
+ private async handleCheckpoints(blocksSynchedTo: bigint, currentL1BlockNumber: bigint): Promise<RollupStatus> {
794
+ const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
795
+ const initialValidationResult: ValidateCheckpointResult | undefined =
796
+ await this.store.getPendingChainValidationStatus();
797
+ const {
798
+ provenCheckpointNumber,
799
+ provenArchive,
800
+ pendingCheckpointNumber,
801
+ pendingArchive,
802
+ archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber,
803
+ } = await execInSpan(this.tracer, 'Archiver.getRollupStatus', () =>
804
+ this.rollup.status(localPendingCheckpointNumber, { blockNumber: currentL1BlockNumber }),
805
+ );
806
+ const rollupStatus: RollupStatus = {
807
+ provenCheckpointNumber,
808
+ provenArchive: provenArchive.toString(),
809
+ pendingCheckpointNumber,
810
+ pendingArchive: pendingArchive.toString(),
811
+ validationResult: initialValidationResult,
812
+ };
813
+ this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
814
+ localPendingCheckpointNumber,
815
+ blocksSynchedTo,
816
+ currentL1BlockNumber,
817
+ archiveForLocalPendingCheckpointNumber,
818
+ ...rollupStatus,
819
+ });
820
+
821
+ const updateProvenCheckpoint = async () => {
822
+ // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
823
+ // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
824
+ // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
825
+ if (provenCheckpointNumber === 0) {
826
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
827
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
828
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
829
+ this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
830
+ }
831
+ }
832
+
833
+ const localCheckpointForDestinationProvenCheckpointNumber =
834
+ await this.store.getCheckpointData(provenCheckpointNumber);
835
+
836
+ // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
837
+ // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
838
+ const synched = await this.getSynchedCheckpointNumber();
839
+ if (
840
+ localCheckpointForDestinationProvenCheckpointNumber &&
841
+ synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber
842
+ ) {
401
843
  this.log.error(
402
- `Hit local block greater than last synched block: ${localBlockForDestinationProvenBlockNumber.number} > ${synched}`,
844
+ `Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`,
403
845
  );
404
846
  }
405
847
 
848
+ this.log.trace(
849
+ `Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${
850
+ localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'
851
+ }`,
852
+ );
853
+
406
854
  if (
407
- localBlockForDestinationProvenBlockNumber &&
408
- provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString()
855
+ localCheckpointForDestinationProvenCheckpointNumber &&
856
+ provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)
409
857
  ) {
410
- const localProvenBlockNumber = await this.store.getProvenL2BlockNumber();
411
- if (localProvenBlockNumber !== Number(provenBlockNumber)) {
412
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
413
- this.log.info(`Updated proven chain to block ${provenBlockNumber}`, {
414
- provenBlockNumber,
858
+ const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
859
+ if (localProvenCheckpointNumber !== provenCheckpointNumber) {
860
+ await this.setProvenCheckpointNumber(provenCheckpointNumber);
861
+ this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber });
862
+ const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
863
+ const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
864
+ const lastBlockNumberInCheckpoint =
865
+ localCheckpointForDestinationProvenCheckpointNumber.startBlock +
866
+ localCheckpointForDestinationProvenCheckpointNumber.numBlocks -
867
+ 1;
868
+
869
+ this.emit(L2BlockSourceEvents.L2BlockProven, {
870
+ type: L2BlockSourceEvents.L2BlockProven,
871
+ blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
872
+ slotNumber: provenSlotNumber,
873
+ epochNumber: provenEpochNumber,
415
874
  });
875
+ this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
876
+ } else {
877
+ this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
416
878
  }
417
879
  }
418
- this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
419
880
  };
420
881
 
421
- // This is an edge case that we only hit if there are no proposed blocks.
422
- // If we have 0 blocks locally and there are no blocks onchain there is nothing to do.
423
- const noBlocks = localPendingBlockNumber === 0n && pendingBlockNumber === 0n;
424
- if (noBlocks) {
425
- await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
426
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
427
- return { provenBlockNumber };
882
+ // This is an edge case that we only hit if there are no proposed checkpoints.
883
+ // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
884
+ const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
885
+ if (noCheckpoints) {
886
+ await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
887
+ this.log.debug(
888
+ `No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`,
889
+ );
890
+ return rollupStatus;
428
891
  }
429
892
 
430
- await updateProvenBlock();
893
+ await updateProvenCheckpoint();
431
894
 
432
895
  // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
433
- // are any state that could be impacted by it. If we have no blocks, there is no impact.
434
- if (localPendingBlockNumber > 0) {
435
- const localPendingBlock = await this.getBlock(Number(localPendingBlockNumber));
436
- if (localPendingBlock === undefined) {
437
- throw new Error(`Missing block ${localPendingBlockNumber}`);
896
+ // is any state that could be impacted by it. If we have no checkpoints, there is no impact.
897
+ if (localPendingCheckpointNumber > 0) {
898
+ const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
899
+ if (localPendingCheckpoint === undefined) {
900
+ throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
438
901
  }
439
902
 
440
- const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingBlock.archive.root.toString();
441
- if (noBlockSinceLast) {
442
- await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
443
- this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
444
- return { provenBlockNumber };
903
+ const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
904
+ const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
905
+ if (noCheckpointSinceLast) {
906
+ // We believe the following line causes a problem when we encounter L1 re-orgs.
907
+ // Basically, by setting the synched L1 block number here, we are saying that we have
908
+ // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
909
+ // this block again (or any blocks before).
910
+ // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
911
+ // We must only set this block number based on actually retrieved logs.
912
+ // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
913
+ // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
914
+ this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
915
+ return rollupStatus;
445
916
  }
446
917
 
447
- const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingBlock.archive.root.toString();
448
- if (!localPendingBlockInChain) {
449
- // If our local pending block tip is not in the chain on L1 a "prune" must have happened
918
+ const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(
919
+ localPendingCheckpoint.archive.root,
920
+ );
921
+ if (!localPendingCheckpointInChain) {
922
+ // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
450
923
  // or the L1 have reorged.
451
924
  // In any case, we have to figure out how far into the past the action will take us.
452
- // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1.
453
- this.log.debug(`L2 prune has been detected.`);
925
+ // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
926
+ this.log.debug(
927
+ `L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`,
928
+ { localPendingCheckpointNumber, localPendingArchiveRoot, archiveForLocalPendingCheckpointNumber },
929
+ );
454
930
 
455
- let tipAfterUnwind = localPendingBlockNumber;
931
+ let tipAfterUnwind = localPendingCheckpointNumber;
456
932
  while (true) {
457
- const candidateBlock = await this.getBlock(Number(tipAfterUnwind));
458
- if (candidateBlock === undefined) {
933
+ const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
934
+ if (candidateCheckpoint === undefined) {
459
935
  break;
460
936
  }
461
937
 
462
- const archiveAtContract = await this.rollup.read.archiveAt([BigInt(candidateBlock.number)]);
463
-
464
- if (archiveAtContract === candidateBlock.archive.root.toString()) {
938
+ const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
939
+ this.log.trace(
940
+ `Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`,
941
+ {
942
+ archiveAtContract,
943
+ archiveLocal: candidateCheckpoint.archive.root.toString(),
944
+ },
945
+ );
946
+ if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
465
947
  break;
466
948
  }
467
949
  tipAfterUnwind--;
468
950
  }
469
951
 
470
- const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind;
471
- await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
952
+ const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
953
+ await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
472
954
 
473
955
  this.log.warn(
474
- `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` +
475
- `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` +
476
- `Updated L2 latest block is ${await this.getBlockNumber()}.`,
956
+ `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` +
957
+ `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` +
958
+ `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`,
477
959
  );
478
960
  }
479
961
  }
480
962
 
481
- // Retrieve L2 blocks in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
963
+ // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
482
964
  // computed using the L2 block time vs the L1 block time.
483
965
  let searchStartBlock: bigint = blocksSynchedTo;
484
966
  let searchEndBlock: bigint = blocksSynchedTo;
967
+ let lastRetrievedCheckpoint: PublishedCheckpoint | undefined;
968
+ let lastL1BlockWithCheckpoint: bigint | undefined = undefined;
485
969
 
486
970
  do {
487
971
  [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
488
972
 
489
- this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
490
-
491
- // TODO(md): Retreive from blob sink then from consensus client, then from peers
492
- const retrievedBlocks = await retrieveBlocksFromRollup(
493
- this.rollup,
494
- this.publicClient,
495
- this.blobSinkClient,
496
- searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
497
- searchEndBlock,
498
- this.log,
973
+ this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
974
+
975
+ // TODO(md): Retrieve from blob client then from consensus client, then from peers
976
+ const retrievedCheckpoints = await execInSpan(this.tracer, 'Archiver.retrieveCheckpointsFromRollup', () =>
977
+ retrieveCheckpointsFromRollup(
978
+ this.rollup.getContract() as GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
979
+ this.publicClient,
980
+ this.debugClient,
981
+ this.blobClient,
982
+ searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
983
+ searchEndBlock,
984
+ this.l1Addresses,
985
+ this.instrumentation,
986
+ this.log,
987
+ !this.initialSyncComplete, // isHistoricalSync
988
+ ),
499
989
  );
500
990
 
501
- if (retrievedBlocks.length === 0) {
991
+ if (retrievedCheckpoints.length === 0) {
502
992
  // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
503
993
  // See further details in earlier comments.
504
- this.log.trace(`Retrieved no new L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`);
994
+ this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
505
995
  continue;
506
996
  }
507
997
 
508
- const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber;
509
998
  this.log.debug(
510
- `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${searchStartBlock} and ${searchEndBlock} with last processed L1 block ${lastProcessedL1BlockNumber}.`,
999
+ `Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`,
1000
+ {
1001
+ lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
1002
+ searchStartBlock,
1003
+ searchEndBlock,
1004
+ },
511
1005
  );
512
1006
 
513
- for (const block of retrievedBlocks) {
514
- this.log.debug(`Ingesting new L2 block ${block.data.number} with ${block.data.body.txEffects.length} txs`, {
515
- blockHash: block.data.hash(),
516
- l1BlockNumber: block.l1.blockNumber,
517
- ...block.data.header.globalVariables.toInspect(),
518
- ...block.data.getStats(),
519
- });
1007
+ const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map(b => retrievedToPublishedCheckpoint(b)));
1008
+ const validCheckpoints: PublishedCheckpoint[] = [];
1009
+
1010
+ for (const published of publishedCheckpoints) {
1011
+ const validationResult = this.config.skipValidateCheckpointAttestations
1012
+ ? { valid: true as const }
1013
+ : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
1014
+
1015
+ // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
1016
+ // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
1017
+ // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
1018
+ // we need to update the validation result, since we need to be able to invalidate the new one.
1019
+ // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
1020
+ if (
1021
+ rollupStatus.validationResult?.valid !== validationResult.valid ||
1022
+ (!rollupStatus.validationResult.valid &&
1023
+ !validationResult.valid &&
1024
+ rollupStatus.validationResult.checkpoint.checkpointNumber === validationResult.checkpoint.checkpointNumber)
1025
+ ) {
1026
+ rollupStatus.validationResult = validationResult;
1027
+ }
1028
+
1029
+ if (!validationResult.valid) {
1030
+ this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
1031
+ checkpointHash: published.checkpoint.hash(),
1032
+ l1BlockNumber: published.l1.blockNumber,
1033
+ ...pick(validationResult, 'reason'),
1034
+ });
1035
+
1036
+ // Emit event for invalid checkpoint detection
1037
+ this.emit(L2BlockSourceEvents.InvalidAttestationsCheckpointDetected, {
1038
+ type: L2BlockSourceEvents.InvalidAttestationsCheckpointDetected,
1039
+ validationResult,
1040
+ });
1041
+
1042
+ // We keep consuming checkpoints if we find an invalid one, since we do not listen for CheckpointInvalidated events
1043
+ // We just pretend the invalid ones are not there and keep consuming the next checkpoints
1044
+ // Note that this breaks if the committee ever attests to a descendant of an invalid checkpoint
1045
+ continue;
1046
+ }
1047
+
1048
+ // Check the inHash of the checkpoint against the l1->l2 messages.
1049
+ // The messages should've been synced up to the currentL1BlockNumber and must be available for the published
1050
+ // checkpoints we just retrieved.
1051
+ const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
1052
+ const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
1053
+ const publishedInHash = published.checkpoint.header.inHash;
1054
+ if (!computedInHash.equals(publishedInHash)) {
1055
+ this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
1056
+ checkpointHash: published.checkpoint.hash(),
1057
+ l1BlockNumber: published.l1.blockNumber,
1058
+ computedInHash,
1059
+ publishedInHash,
1060
+ });
1061
+ // Throwing an error since this is most likely caused by a bug.
1062
+ throw new Error(
1063
+ `Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`,
1064
+ );
1065
+ }
1066
+
1067
+ validCheckpoints.push(published);
1068
+ this.log.debug(
1069
+ `Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`,
1070
+ {
1071
+ checkpointHash: published.checkpoint.hash(),
1072
+ l1BlockNumber: published.l1.blockNumber,
1073
+ ...published.checkpoint.header.toInspect(),
1074
+ blocks: published.checkpoint.blocks.map(b => b.getStats()),
1075
+ },
1076
+ );
520
1077
  }
521
1078
 
522
- const [processDuration] = await elapsed(() => this.store.addBlocks(retrievedBlocks));
523
- this.instrumentation.processNewBlocks(
524
- processDuration / retrievedBlocks.length,
525
- retrievedBlocks.map(b => b.data),
526
- );
1079
+ try {
1080
+ const updatedValidationResult =
1081
+ rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
1082
+ const [processDuration] = await elapsed(() =>
1083
+ execInSpan(this.tracer, 'Archiver.addCheckpoints', () =>
1084
+ this.addCheckpoints(validCheckpoints, updatedValidationResult),
1085
+ ),
1086
+ );
1087
+ this.instrumentation.processNewBlocks(
1088
+ processDuration / validCheckpoints.length,
1089
+ validCheckpoints.flatMap(c => c.checkpoint.blocks),
1090
+ );
1091
+ } catch (err) {
1092
+ if (err instanceof InitialCheckpointNumberNotSequentialError) {
1093
+ const { previousCheckpointNumber, newCheckpointNumber } = err;
1094
+ const previousCheckpoint = previousCheckpointNumber
1095
+ ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber))
1096
+ : undefined;
1097
+ const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
1098
+ await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
1099
+ this.log.warn(
1100
+ `Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`,
1101
+ {
1102
+ previousCheckpointNumber,
1103
+ newCheckpointNumber,
1104
+ updatedL1SyncPoint,
1105
+ },
1106
+ );
1107
+ }
1108
+ throw err;
1109
+ }
527
1110
 
528
- for (const block of retrievedBlocks) {
529
- this.log.info(`Downloaded L2 block ${block.data.number}`, {
530
- blockHash: block.data.hash(),
531
- blockNumber: block.data.number,
532
- txCount: block.data.body.txEffects.length,
533
- globalVariables: block.data.header.globalVariables.toInspect(),
1111
+ for (const checkpoint of validCheckpoints) {
1112
+ this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
1113
+ checkpointHash: checkpoint.checkpoint.hash(),
1114
+ checkpointNumber: checkpoint.checkpoint.number,
1115
+ blockCount: checkpoint.checkpoint.blocks.length,
1116
+ txCount: checkpoint.checkpoint.blocks.reduce((acc, b) => acc + b.body.txEffects.length, 0),
1117
+ header: checkpoint.checkpoint.header.toInspect(),
1118
+ archiveRoot: checkpoint.checkpoint.archive.root.toString(),
1119
+ archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex,
534
1120
  });
535
1121
  }
1122
+ lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
1123
+ lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
536
1124
  } while (searchEndBlock < currentL1BlockNumber);
537
1125
 
538
1126
  // Important that we update AFTER inserting the blocks.
539
- await updateProvenBlock();
1127
+ await updateProvenCheckpoint();
1128
+
1129
+ return { ...rollupStatus, lastRetrievedCheckpoint, lastL1BlockWithCheckpoint };
1130
+ }
1131
+
1132
+ private async checkForNewCheckpointsBeforeL1SyncPoint(
1133
+ status: RollupStatus,
1134
+ blocksSynchedTo: bigint,
1135
+ currentL1BlockNumber: bigint,
1136
+ ) {
1137
+ const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
1138
+ // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
1139
+ // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
1140
+ const latestLocalCheckpointNumber =
1141
+ lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber());
1142
+ if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
1143
+ // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
1144
+ // but still haven't reached the pending checkpoint according to the call to the rollup contract.
1145
+ // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
1146
+ // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
1147
+ // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
1148
+ let latestLocalCheckpointArchive: string | undefined = undefined;
1149
+ let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
1150
+ if (lastRetrievedCheckpoint) {
1151
+ latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
1152
+ targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
1153
+ } else if (latestLocalCheckpointNumber > 0) {
1154
+ const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c]) => c);
1155
+ latestLocalCheckpointArchive = checkpoint.archive.root.toString();
1156
+ targetL1BlockNumber = checkpoint.l1.blockNumber;
1157
+ }
1158
+ this.log.warn(
1159
+ `Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` +
1160
+ `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`,
1161
+ {
1162
+ latestLocalCheckpointNumber,
1163
+ latestLocalCheckpointArchive,
1164
+ blocksSynchedTo,
1165
+ currentL1BlockNumber,
1166
+ ...status,
1167
+ },
1168
+ );
1169
+ await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
1170
+ } else {
1171
+ this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
1172
+ latestLocalCheckpointNumber,
1173
+ pendingCheckpointNumber,
1174
+ });
1175
+ }
1176
+ }
540
1177
 
541
- return { provenBlockNumber };
1178
+ /** Resumes the archiver after a stop. */
1179
+ public resume() {
1180
+ if (this.runningPromise.isRunning()) {
1181
+ this.log.warn(`Archiver already running`);
1182
+ }
1183
+ this.log.info(`Restarting archiver`);
1184
+ this.runningPromise.start();
542
1185
  }
543
1186
 
544
1187
  /**
@@ -547,16 +1190,24 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
547
1190
  */
548
1191
  public async stop(): Promise<void> {
549
1192
  this.log.debug('Stopping...');
550
- await this.runningPromise?.stop();
1193
+ await this.runningPromise.stop();
551
1194
 
552
1195
  this.log.info('Stopped.');
553
1196
  return Promise.resolve();
554
1197
  }
555
1198
 
1199
+ public backupTo(destPath: string): Promise<string> {
1200
+ return this.dataStore.backupTo(destPath);
1201
+ }
1202
+
556
1203
  public getL1Constants(): Promise<L1RollupConstants> {
557
1204
  return Promise.resolve(this.l1constants);
558
1205
  }
559
1206
 
1207
+ public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> {
1208
+ return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot });
1209
+ }
1210
+
560
1211
  public getRollupAddress(): Promise<EthAddress> {
561
1212
  return Promise.resolve(this.l1Addresses.rollupAddress);
562
1213
  }
@@ -565,52 +1216,79 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
565
1216
  return Promise.resolve(this.l1Addresses.registryAddress);
566
1217
  }
567
1218
 
568
- public getL1BlockNumber(): bigint {
569
- const l1BlockNumber = this.l1BlockNumber;
570
- if (!l1BlockNumber) {
571
- throw new Error('L1 block number not yet available. Complete an initial sync first.');
572
- }
573
- return l1BlockNumber;
1219
+ public getL1BlockNumber(): bigint | undefined {
1220
+ return this.l1BlockNumber;
574
1221
  }
575
1222
 
576
- public getL1Timestamp(): bigint {
577
- const l1Timestamp = this.l1Timestamp;
578
- if (!l1Timestamp) {
579
- throw new Error('L1 timestamp not yet available. Complete an initial sync first.');
580
- }
581
- return l1Timestamp;
1223
+ public getL1Timestamp(): Promise<bigint | undefined> {
1224
+ return Promise.resolve(this.l1Timestamp);
582
1225
  }
583
1226
 
584
- public getL2SlotNumber(): Promise<bigint> {
585
- return Promise.resolve(getSlotAtTimestamp(this.getL1Timestamp(), this.l1constants));
1227
+ public getL2SlotNumber(): Promise<SlotNumber | undefined> {
1228
+ return Promise.resolve(
1229
+ this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants),
1230
+ );
586
1231
  }
587
1232
 
588
- public getL2EpochNumber(): Promise<bigint> {
589
- return Promise.resolve(getEpochNumberAtTimestamp(this.getL1Timestamp(), this.l1constants));
1233
+ public getL2EpochNumber(): Promise<EpochNumber | undefined> {
1234
+ return Promise.resolve(
1235
+ this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants),
1236
+ );
590
1237
  }
591
1238
 
592
- public async getBlocksForEpoch(epochNumber: bigint): Promise<L2Block[]> {
1239
+ public async getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
593
1240
  const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
594
1241
  const blocks: L2Block[] = [];
595
1242
 
596
- // Walk the list of blocks backwards and filter by slots matching the requested epoch.
597
- // We'll typically ask for blocks for a very recent epoch, so we shouldn't need an index here.
598
- let block = await this.getBlock(await this.store.getSynchedL2BlockNumber());
599
- const slot = (b: L2Block) => b.header.globalVariables.slotNumber.toBigInt();
600
- while (block && slot(block) >= start) {
601
- if (slot(block) <= end) {
602
- blocks.push(block);
1243
+ // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1244
+ // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1245
+ let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1246
+ const slot = (b: CheckpointData) => b.header.slotNumber;
1247
+ while (checkpoint && slot(checkpoint) >= start) {
1248
+ if (slot(checkpoint) <= end) {
1249
+ // push the blocks on backwards
1250
+ const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
1251
+ for (let i = endBlock; i >= checkpoint.startBlock; i--) {
1252
+ const block = await this.getBlock(BlockNumber(i));
1253
+ if (block) {
1254
+ blocks.push(block);
1255
+ }
1256
+ }
603
1257
  }
604
- block = await this.getBlock(block.number - 1);
1258
+ checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
605
1259
  }
606
1260
 
607
1261
  return blocks.reverse();
608
1262
  }
609
1263
 
610
- public async isEpochComplete(epochNumber: bigint): Promise<boolean> {
1264
+ public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
1265
+ const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1266
+ const blocks: BlockHeader[] = [];
1267
+
1268
+ // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1269
+ // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1270
+ let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1271
+ const slot = (b: CheckpointData) => b.header.slotNumber;
1272
+ while (checkpoint && slot(checkpoint) >= start) {
1273
+ if (slot(checkpoint) <= end) {
1274
+ // push the blocks on backwards
1275
+ const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
1276
+ for (let i = endBlock; i >= checkpoint.startBlock; i--) {
1277
+ const block = await this.getBlockHeader(BlockNumber(i));
1278
+ if (block) {
1279
+ blocks.push(block);
1280
+ }
1281
+ }
1282
+ }
1283
+ checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
1284
+ }
1285
+ return blocks.reverse();
1286
+ }
1287
+
1288
+ public async isEpochComplete(epochNumber: EpochNumber): Promise<boolean> {
611
1289
  // The epoch is complete if the current L2 block is the last one in the epoch (or later)
612
1290
  const header = await this.getBlockHeader('latest');
613
- const slot = header?.globalVariables.slotNumber.toBigInt();
1291
+ const slot = header ? header.globalVariables.slotNumber : undefined;
614
1292
  const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
615
1293
  if (slot && slot >= endSlot) {
616
1294
  return true;
@@ -635,18 +1313,66 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
635
1313
  return l1Timestamp + leeway >= endTimestamp;
636
1314
  }
637
1315
 
638
- /**
639
- * Gets up to `limit` amount of L2 blocks starting from `from`.
640
- * @param from - Number of the first block to return (inclusive).
641
- * @param limit - The number of blocks to return.
642
- * @param proven - If true, only return blocks that have been proven.
643
- * @returns The requested L2 blocks.
644
- */
645
- public async getBlocks(from: number, limit: number, proven?: boolean): Promise<L2Block[]> {
646
- const limitWithProven = proven
647
- ? Math.min(limit, Math.max((await this.store.getProvenL2BlockNumber()) - from + 1, 0))
648
- : limit;
649
- return limitWithProven === 0 ? [] : (await this.store.getBlocks(from, limitWithProven)).map(b => b.data);
1316
+ /** Returns whether the archiver has completed an initial sync run successfully. */
1317
+ public isInitialSyncComplete(): boolean {
1318
+ return this.initialSyncComplete;
1319
+ }
1320
+
1321
+ public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise<CheckpointHeader | undefined> {
1322
+ if (number === 'latest') {
1323
+ number = await this.getSynchedCheckpointNumber();
1324
+ }
1325
+ if (number === 0) {
1326
+ return undefined;
1327
+ }
1328
+ const checkpoint = await this.store.getCheckpointData(number);
1329
+ if (!checkpoint) {
1330
+ return undefined;
1331
+ }
1332
+ return checkpoint.header;
1333
+ }
1334
+
1335
+ public getCheckpointNumber(): Promise<CheckpointNumber> {
1336
+ return this.getSynchedCheckpointNumber();
1337
+ }
1338
+
1339
+ public getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
1340
+ return this.store.getSynchedCheckpointNumber();
1341
+ }
1342
+
1343
+ public getProvenCheckpointNumber(): Promise<CheckpointNumber> {
1344
+ return this.store.getProvenCheckpointNumber();
1345
+ }
1346
+
1347
+ public setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
1348
+ return this.store.setProvenCheckpointNumber(checkpointNumber);
1349
+ }
1350
+
1351
+ public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
1352
+ return this.store.unwindCheckpoints(from, checkpointsToUnwind);
1353
+ }
1354
+
1355
+ public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise<BlockNumber | undefined> {
1356
+ const checkpointData = await this.store.getCheckpointData(checkpointNumber);
1357
+ if (!checkpointData) {
1358
+ return undefined;
1359
+ }
1360
+ return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
1361
+ }
1362
+
1363
+ public addCheckpoints(
1364
+ checkpoints: PublishedCheckpoint[],
1365
+ pendingChainValidationStatus?: ValidateCheckpointResult,
1366
+ ): Promise<boolean> {
1367
+ return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
1368
+ }
1369
+
1370
+ public getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
1371
+ return this.store.getBlockHeaderByHash(blockHash);
1372
+ }
1373
+
1374
+ public getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
1375
+ return this.store.getBlockHeaderByArchive(archive);
650
1376
  }
651
1377
 
652
1378
  /**
@@ -654,19 +1380,29 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
654
1380
  * @param number - The block number to return.
655
1381
  * @returns The requested L2 block.
656
1382
  */
657
- public async getBlock(number: number): Promise<L2Block | undefined> {
1383
+ public async getL2BlockNew(number: BlockNumber): Promise<L2BlockNew | undefined> {
658
1384
  // If the number provided is -ve, then return the latest block.
659
1385
  if (number < 0) {
660
1386
  number = await this.store.getSynchedL2BlockNumber();
661
1387
  }
662
- if (number == 0) {
1388
+ if (number === 0) {
663
1389
  return undefined;
664
1390
  }
665
- const blocks = await this.store.getBlocks(number, 1);
666
- return blocks.length === 0 ? undefined : blocks[0].data;
1391
+ const publishedBlock = await this.store.store.getBlock(number);
1392
+ return publishedBlock;
1393
+ }
1394
+
1395
+ public async getL2BlocksNew(from: BlockNumber, limit: number, proven?: boolean): Promise<L2BlockNew[]> {
1396
+ const blocks = await this.store.store.getBlocks(from, limit);
1397
+
1398
+ if (proven === true) {
1399
+ const provenBlockNumber = await this.store.getProvenBlockNumber();
1400
+ return blocks.filter(b => b.number <= provenBlockNumber);
1401
+ }
1402
+ return blocks;
667
1403
  }
668
1404
 
669
- public async getBlockHeader(number: number | 'latest'): Promise<BlockHeader | undefined> {
1405
+ public async getBlockHeader(number: BlockNumber | 'latest'): Promise<BlockHeader | undefined> {
670
1406
  if (number === 'latest') {
671
1407
  number = await this.store.getSynchedL2BlockNumber();
672
1408
  }
@@ -677,66 +1413,52 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
677
1413
  return headers.length === 0 ? undefined : headers[0];
678
1414
  }
679
1415
 
680
- public getTxEffect(txHash: TxHash) {
681
- return this.store.getTxEffect(txHash);
1416
+ getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
1417
+ return this.store.getCheckpointedBlock(number);
682
1418
  }
683
1419
 
684
- public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
685
- return this.store.getSettledTxReceipt(txHash);
686
- }
1420
+ public async getCheckpointedBlocks(
1421
+ from: BlockNumber,
1422
+ limit: number,
1423
+ proven?: boolean,
1424
+ ): Promise<CheckpointedL2Block[]> {
1425
+ const blocks = await this.store.store.getCheckpointedBlocks(from, limit);
687
1426
 
688
- /**
689
- * Gets the public function data for a contract.
690
- * @param address - The contract address containing the function to fetch.
691
- * @param selector - The function selector of the function to fetch.
692
- * @returns The public function data (if found).
693
- */
694
- public async getPublicFunction(
695
- address: AztecAddress,
696
- selector: FunctionSelector,
697
- ): Promise<PublicFunction | undefined> {
698
- const instance = await this.getContract(address);
699
- if (!instance) {
700
- throw new Error(`Contract ${address.toString()} not found`);
701
- }
702
- const contractClass = await this.getContractClass(instance.currentContractClassId);
703
- if (!contractClass) {
704
- throw new Error(
705
- `Contract class ${instance.currentContractClassId.toString()} for ${address.toString()} not found`,
706
- );
1427
+ if (proven === true) {
1428
+ const provenBlockNumber = await this.store.getProvenBlockNumber();
1429
+ return blocks.filter(b => b.block.number <= provenBlockNumber);
707
1430
  }
708
- return contractClass.publicFunctions.find(f => f.selector.equals(selector));
1431
+ return blocks;
709
1432
  }
710
1433
 
711
- /**
712
- * Retrieves all private logs from up to `limit` blocks, starting from the block number `from`.
713
- * @param from - The block number from which to begin retrieving logs.
714
- * @param limit - The maximum number of blocks to retrieve logs from.
715
- * @returns An array of private logs from the specified range of blocks.
716
- */
717
- public getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]> {
718
- return this.store.getPrivateLogs(from, limit);
1434
+ getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
1435
+ return this.store.getCheckpointedBlockByHash(blockHash);
719
1436
  }
720
1437
 
721
- /**
722
- * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
723
- * @param tags - The tags to filter the logs by.
724
- * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
725
- * that tag.
726
- */
727
- getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
728
- return this.store.getLogsByTags(tags);
1438
+ getProvenBlockNumber(): Promise<BlockNumber> {
1439
+ return this.store.getProvenBlockNumber();
1440
+ }
1441
+ getCheckpointedBlockNumber(): Promise<BlockNumber> {
1442
+ return this.store.getCheckpointedL2BlockNumber();
1443
+ }
1444
+ getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
1445
+ return this.store.getCheckpointedBlockByArchive(archive);
729
1446
  }
730
1447
 
731
- /**
732
- * Returns the provided nullifier indexes scoped to the block
733
- * they were first included in, or undefined if they're not present in the tree
734
- * @param blockNumber Max block number to search for the nullifiers
735
- * @param nullifiers Nullifiers to get
736
- * @returns The block scoped indexes of the provided nullifiers, or undefined if the nullifier doesn't exist in the tree
737
- */
738
- findNullifiersIndexesWithBlock(blockNumber: number, nullifiers: Fr[]): Promise<(InBlock<bigint> | undefined)[]> {
739
- return this.store.findNullifiersIndexesWithBlock(blockNumber, nullifiers);
1448
+ public getTxEffect(txHash: TxHash) {
1449
+ return this.store.getTxEffect(txHash);
1450
+ }
1451
+
1452
+ public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
1453
+ return this.store.getSettledTxReceipt(txHash);
1454
+ }
1455
+
1456
+ getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
1457
+ return this.store.getPrivateLogsByTags(tags);
1458
+ }
1459
+
1460
+ getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
1461
+ return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
740
1462
  }
741
1463
 
742
1464
  /**
@@ -759,19 +1481,11 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
759
1481
 
760
1482
  /**
761
1483
  * Gets the number of the latest L2 block processed by the block source implementation.
1484
+ * This includes both checkpointed and uncheckpointed blocks.
762
1485
  * @returns The number of the latest L2 block processed by the block source implementation.
763
1486
  */
764
- public getBlockNumber(): Promise<number> {
765
- return this.store.getSynchedL2BlockNumber();
766
- }
767
-
768
- public getProvenBlockNumber(): Promise<number> {
769
- return this.store.getProvenL2BlockNumber();
770
- }
771
-
772
- /** Forcefully updates the last proven block number. Use for testing. */
773
- public setProvenBlockNumber(blockNumber: number): Promise<void> {
774
- return this.store.setProvenL2BlockNumber(blockNumber);
1487
+ public getBlockNumber(): Promise<BlockNumber> {
1488
+ return this.store.getLatestBlockNumber();
775
1489
  }
776
1490
 
777
1491
  public getContractClass(id: Fr): Promise<ContractClassPublic | undefined> {
@@ -782,17 +1496,29 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
782
1496
  return this.store.getBytecodeCommitment(id);
783
1497
  }
784
1498
 
785
- public getContract(address: AztecAddress): Promise<ContractInstanceWithAddress | undefined> {
786
- return this.store.getContractInstance(address);
1499
+ public async getContract(
1500
+ address: AztecAddress,
1501
+ maybeTimestamp?: UInt64,
1502
+ ): Promise<ContractInstanceWithAddress | undefined> {
1503
+ let timestamp;
1504
+ if (maybeTimestamp === undefined) {
1505
+ const latestBlockHeader = await this.getBlockHeader('latest');
1506
+ // If we get undefined block header, it means that the archiver has not yet synced any block so we default to 0.
1507
+ timestamp = latestBlockHeader ? latestBlockHeader.globalVariables.timestamp : 0n;
1508
+ } else {
1509
+ timestamp = maybeTimestamp;
1510
+ }
1511
+
1512
+ return this.store.getContractInstance(address, timestamp);
787
1513
  }
788
1514
 
789
1515
  /**
790
- * Gets L1 to L2 message (to be) included in a given block.
791
- * @param blockNumber - L2 block number to get messages for.
1516
+ * Gets L1 to L2 message (to be) included in a given checkpoint.
1517
+ * @param checkpointNumber - Checkpoint number to get messages for.
792
1518
  * @returns The L1 to L2 messages/leaves of the messages subtree (throws if not found).
793
1519
  */
794
- getL1ToL2Messages(blockNumber: bigint): Promise<Fr[]> {
795
- return this.store.getL1ToL2Messages(blockNumber);
1520
+ getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
1521
+ return this.store.getL1ToL2Messages(checkpointNumber);
796
1522
  }
797
1523
 
798
1524
  /**
@@ -808,62 +1534,312 @@ export class Archiver extends EventEmitter implements ArchiveSource, Traceable {
808
1534
  return this.store.getContractClassIds();
809
1535
  }
810
1536
 
811
- // TODO(#10007): Remove this method
812
- async addContractClass(contractClass: ContractClassPublic): Promise<void> {
813
- await this.store.addContractClasses(
814
- [contractClass],
815
- [await computePublicBytecodeCommitment(contractClass.packedBytecode)],
816
- 0,
817
- );
818
- return;
1537
+ registerContractFunctionSignatures(signatures: string[]): Promise<void> {
1538
+ return this.store.registerContractFunctionSignatures(signatures);
1539
+ }
1540
+
1541
+ getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
1542
+ return this.store.getDebugFunctionName(address, selector);
819
1543
  }
820
1544
 
821
- registerContractFunctionSignatures(address: AztecAddress, signatures: string[]): Promise<void> {
822
- return this.store.registerContractFunctionSignatures(address, signatures);
1545
+ async getPendingChainValidationStatus(): Promise<ValidateCheckpointResult> {
1546
+ return (await this.store.getPendingChainValidationStatus()) ?? { valid: true };
823
1547
  }
824
1548
 
825
- getContractFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
826
- return this.store.getContractFunctionName(address, selector);
1549
+ isPendingChainInvalid(): Promise<boolean> {
1550
+ return this.getPendingChainValidationStatus().then(status => !status.valid);
827
1551
  }
828
1552
 
829
1553
  async getL2Tips(): Promise<L2Tips> {
830
- const [latestBlockNumber, provenBlockNumber] = await Promise.all([
1554
+ const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([
831
1555
  this.getBlockNumber(),
832
1556
  this.getProvenBlockNumber(),
1557
+ this.getCheckpointedBlockNumber(),
833
1558
  ] as const);
834
1559
 
835
- const [latestBlockHeader, provenBlockHeader] = await Promise.all([
836
- latestBlockNumber > 0 ? this.getBlockHeader(latestBlockNumber) : undefined,
837
- provenBlockNumber > 0 ? this.getBlockHeader(provenBlockNumber) : undefined,
838
- ] as const);
839
-
840
- if (latestBlockNumber > 0 && !latestBlockHeader) {
1560
+ // TODO(#13569): Compute proper finalized block number based on L1 finalized block.
1561
+ // We just force it 2 epochs worth of proven data for now.
1562
+ // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
1563
+ const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
1564
+
1565
+ const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
1566
+
1567
+ // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks
1568
+ const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] =
1569
+ await Promise.all([
1570
+ latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
1571
+ provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
1572
+ finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
1573
+ checkpointedBlockNumber > beforeInitialblockNumber
1574
+ ? this.getCheckpointedBlock(checkpointedBlockNumber)
1575
+ : undefined,
1576
+ ] as const);
1577
+
1578
+ if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
841
1579
  throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
842
1580
  }
843
1581
 
844
- if (provenBlockNumber > 0 && !provenBlockHeader) {
1582
+ // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
1583
+ if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
1584
+ throw new Error(
1585
+ `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`,
1586
+ );
1587
+ }
1588
+
1589
+ if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
845
1590
  throw new Error(
846
- `Failed to retrieve proven block header for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
1591
+ `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`,
847
1592
  );
848
1593
  }
849
1594
 
850
- const latestBlockHeaderHash = await latestBlockHeader?.hash();
851
- const provenBlockHeaderHash = await provenBlockHeader?.hash();
852
- const finalizedBlockHeaderHash = await provenBlockHeader?.hash();
853
- return {
854
- latest: {
1595
+ if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
1596
+ throw new Error(
1597
+ `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`,
1598
+ );
1599
+ }
1600
+
1601
+ const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
1602
+ const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
1603
+ const finalizedBlockHeaderHash =
1604
+ (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
1605
+ const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH;
1606
+
1607
+ // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks
1608
+ const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
1609
+ provenCheckpointedBlock !== undefined
1610
+ ? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1)
1611
+ : [undefined],
1612
+ finalizedCheckpointedBlock !== undefined
1613
+ ? await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1)
1614
+ : [undefined],
1615
+ checkpointedBlock !== undefined
1616
+ ? await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1)
1617
+ : [undefined],
1618
+ ]);
1619
+
1620
+ const initialcheckpointId: CheckpointId = {
1621
+ number: CheckpointNumber.ZERO,
1622
+ hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(),
1623
+ };
1624
+
1625
+ const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => {
1626
+ if (checkpoint === undefined) {
1627
+ return initialcheckpointId;
1628
+ }
1629
+ return {
1630
+ number: checkpoint.checkpoint.number,
1631
+ hash: checkpoint.checkpoint.hash().toString(),
1632
+ };
1633
+ };
1634
+
1635
+ const l2Tips: L2Tips = {
1636
+ proposed: {
855
1637
  number: latestBlockNumber,
856
- hash: latestBlockHeaderHash?.toString(),
857
- } as L2BlockId,
1638
+ hash: latestBlockHeaderHash.toString(),
1639
+ },
858
1640
  proven: {
859
- number: provenBlockNumber,
860
- hash: provenBlockHeaderHash?.toString(),
861
- } as L2BlockId,
1641
+ block: {
1642
+ number: provenBlockNumber,
1643
+ hash: provenBlockHeaderHash.toString(),
1644
+ },
1645
+ checkpoint: makeCheckpointId(provenBlockCheckpoint),
1646
+ },
862
1647
  finalized: {
863
- number: provenBlockNumber,
864
- hash: finalizedBlockHeaderHash?.toString(),
865
- } as L2BlockId,
1648
+ block: {
1649
+ number: finalizedBlockNumber,
1650
+ hash: finalizedBlockHeaderHash.toString(),
1651
+ },
1652
+ checkpoint: makeCheckpointId(finalizedBlockCheckpoint),
1653
+ },
1654
+ checkpointed: {
1655
+ block: {
1656
+ number: checkpointedBlockNumber,
1657
+ hash: checkpointedBlockHeaderHash.toString(),
1658
+ },
1659
+ checkpoint: makeCheckpointId(checkpointedBlockCheckpoint),
1660
+ },
866
1661
  };
1662
+
1663
+ return l2Tips;
1664
+ }
1665
+
1666
+ public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise<void> {
1667
+ // TODO(pw/mbps): This still assumes 1 block per checkpoint
1668
+ const currentBlocks = await this.getL2Tips();
1669
+ const currentL2Block = currentBlocks.proposed.number;
1670
+ const currentProvenBlock = currentBlocks.proven.block.number;
1671
+
1672
+ if (targetL2BlockNumber >= currentL2Block) {
1673
+ throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
1674
+ }
1675
+ const blocksToUnwind = currentL2Block - targetL2BlockNumber;
1676
+ const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
1677
+ if (!targetL2Block) {
1678
+ throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
1679
+ }
1680
+ const targetL1BlockNumber = targetL2Block.l1.blockNumber;
1681
+ const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
1682
+ const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
1683
+ this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
1684
+ await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
1685
+ this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
1686
+ await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
1687
+ this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
1688
+ await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
1689
+ await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash });
1690
+ if (targetL2BlockNumber < currentProvenBlock) {
1691
+ this.log.info(`Clearing proven L2 block number`);
1692
+ await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
1693
+ }
1694
+ // TODO(palla/reorg): Set the finalized block when we add support for it.
1695
+ // if (targetL2BlockNumber < currentFinalizedBlock) {
1696
+ // this.log.info(`Clearing finalized L2 block number`);
1697
+ // await this.store.setFinalizedL2BlockNumber(0);
1698
+ // }
1699
+ }
1700
+
1701
+ public async getPublishedCheckpoints(
1702
+ checkpointNumber: CheckpointNumber,
1703
+ limit: number,
1704
+ ): Promise<PublishedCheckpoint[]> {
1705
+ const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
1706
+ const blocks = (
1707
+ await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
1708
+ ).filter(isDefined);
1709
+
1710
+ const fullCheckpoints: PublishedCheckpoint[] = [];
1711
+ for (let i = 0; i < checkpoints.length; i++) {
1712
+ const blocksForCheckpoint = blocks[i];
1713
+ const checkpoint = checkpoints[i];
1714
+ const fullCheckpoint = new Checkpoint(
1715
+ checkpoint.archive,
1716
+ checkpoint.header,
1717
+ blocksForCheckpoint,
1718
+ checkpoint.checkpointNumber,
1719
+ );
1720
+ const publishedCheckpoint = new PublishedCheckpoint(
1721
+ fullCheckpoint,
1722
+ checkpoint.l1,
1723
+ checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
1724
+ );
1725
+ fullCheckpoints.push(publishedCheckpoint);
1726
+ }
1727
+ return fullCheckpoints;
1728
+ }
1729
+
1730
+ public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise<Checkpoint[]> {
1731
+ const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1732
+ const checkpoints: Checkpoint[] = [];
1733
+
1734
+ // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1735
+ // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1736
+ let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1737
+ const slot = (b: CheckpointData) => b.header.slotNumber;
1738
+ while (checkpointData && slot(checkpointData) >= start) {
1739
+ if (slot(checkpointData) <= end) {
1740
+ // push the checkpoints on backwards
1741
+ const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
1742
+ checkpoints.push(checkpoint.checkpoint);
1743
+ }
1744
+ checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
1745
+ }
1746
+
1747
+ return checkpoints.reverse();
1748
+ }
1749
+
1750
+ /* Legacy APIs */
1751
+
1752
+ public async getPublishedBlockByHash(blockHash: Fr): Promise<PublishedL2Block | undefined> {
1753
+ const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
1754
+ return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
1755
+ }
1756
+ public async getPublishedBlockByArchive(archive: Fr): Promise<PublishedL2Block | undefined> {
1757
+ const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
1758
+ return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
1759
+ }
1760
+
1761
+ /**
1762
+ * Gets up to `limit` amount of L2 blocks starting from `from`.
1763
+ * @param from - Number of the first block to return (inclusive).
1764
+ * @param limit - The number of blocks to return.
1765
+ * @param proven - If true, only return blocks that have been proven.
1766
+ * @returns The requested L2 blocks.
1767
+ */
1768
+ public async getBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<L2Block[]> {
1769
+ const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
1770
+ return publishedBlocks.map(x => x.block);
1771
+ }
1772
+
1773
+ public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise<PublishedL2Block[]> {
1774
+ const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
1775
+ const provenCheckpointNumber = await this.getProvenCheckpointNumber();
1776
+ const blocks = (
1777
+ await Promise.all(checkpoints.map(ch => this.store.getBlocksForCheckpoint(ch.checkpointNumber)))
1778
+ ).filter(isDefined);
1779
+
1780
+ const olbBlocks: PublishedL2Block[] = [];
1781
+ for (let i = 0; i < checkpoints.length; i++) {
1782
+ const blockForCheckpoint = blocks[i][0];
1783
+ const checkpoint = checkpoints[i];
1784
+ if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
1785
+ // this checkpointisn't proven and we only want proven
1786
+ continue;
1787
+ }
1788
+ const oldCheckpoint = new Checkpoint(
1789
+ blockForCheckpoint.archive,
1790
+ checkpoint.header,
1791
+ [blockForCheckpoint],
1792
+ checkpoint.checkpointNumber,
1793
+ );
1794
+ const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
1795
+ const publishedBlock = new PublishedL2Block(
1796
+ oldBlock,
1797
+ checkpoint.l1,
1798
+ checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
1799
+ );
1800
+ olbBlocks.push(publishedBlock);
1801
+ }
1802
+ return olbBlocks;
1803
+ }
1804
+
1805
+ private async buildOldBlockFromCheckpointedBlock(
1806
+ checkpointedBlock: CheckpointedL2Block | undefined,
1807
+ ): Promise<PublishedL2Block | undefined> {
1808
+ if (!checkpointedBlock) {
1809
+ return undefined;
1810
+ }
1811
+ const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
1812
+ if (!checkpoint) {
1813
+ return checkpoint;
1814
+ }
1815
+ const fullCheckpoint = new Checkpoint(
1816
+ checkpointedBlock?.block.archive,
1817
+ checkpoint?.header,
1818
+ [checkpointedBlock.block],
1819
+ checkpoint.checkpointNumber,
1820
+ );
1821
+ const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
1822
+ const published = new PublishedL2Block(
1823
+ oldBlock,
1824
+ checkpoint.l1,
1825
+ checkpoint.attestations.map(x => CommitteeAttestation.fromBuffer(x)),
1826
+ );
1827
+ return published;
1828
+ }
1829
+
1830
+ public async getBlock(number: BlockNumber): Promise<L2Block | undefined> {
1831
+ // If the number provided is -ve, then return the latest block.
1832
+ if (number < 0) {
1833
+ number = await this.store.getSynchedL2BlockNumber();
1834
+ }
1835
+ if (number === 0) {
1836
+ return undefined;
1837
+ }
1838
+ const publishedBlocks = await this.getPublishedBlocks(number, 1);
1839
+ if (publishedBlocks.length === 0) {
1840
+ return undefined;
1841
+ }
1842
+ return publishedBlocks[0].block;
867
1843
  }
868
1844
  }
869
1845
 
@@ -878,14 +1854,12 @@ enum Operation {
878
1854
  * I would have preferred to not have this type. But it is useful for handling the logic that any
879
1855
  * store would need to include otherwise while exposing fewer functions and logic directly to the archiver.
880
1856
  */
881
- class ArchiverStoreHelper
1857
+ export class ArchiverStoreHelper
882
1858
  implements
883
1859
  Omit<
884
1860
  ArchiverDataStore,
885
1861
  | 'addLogs'
886
1862
  | 'deleteLogs'
887
- | 'addNullifiers'
888
- | 'deleteNullifiers'
889
1863
  | 'addContractClasses'
890
1864
  | 'deleteContractClasses'
891
1865
  | 'addContractInstances'
@@ -893,31 +1867,29 @@ class ArchiverStoreHelper
893
1867
  | 'addContractInstanceUpdates'
894
1868
  | 'deleteContractInstanceUpdates'
895
1869
  | 'addFunctions'
1870
+ | 'backupTo'
1871
+ | 'close'
1872
+ | 'transactionAsync'
1873
+ | 'addBlocks'
1874
+ | 'getBlock'
1875
+ | 'getBlocks'
1876
+ | 'getCheckpointedBlocks'
896
1877
  >
897
1878
  {
898
1879
  #log = createLogger('archiver:block-helper');
899
1880
 
900
- constructor(private readonly store: ArchiverDataStore) {}
901
-
902
- // TODO(#10007): Remove this method
903
- addContractClasses(
904
- contractClasses: ContractClassPublic[],
905
- bytecodeCommitments: Fr[],
906
- blockNum: number,
907
- ): Promise<boolean> {
908
- return this.store.addContractClasses(contractClasses, bytecodeCommitments, blockNum);
909
- }
1881
+ constructor(public readonly store: ArchiverDataStore) {}
910
1882
 
911
1883
  /**
912
- * Extracts and stores contract classes out of ContractClassRegistered events emitted by the class registerer contract.
1884
+ * Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract.
913
1885
  * @param allLogs - All logs emitted in a bunch of blocks.
914
1886
  */
915
- async #updateRegisteredContractClasses(allLogs: ContractClassLog[], blockNum: number, operation: Operation) {
916
- const contractClassRegisteredEvents = allLogs
917
- .filter(log => ContractClassRegisteredEvent.isContractClassRegisteredEvent(log))
918
- .map(log => ContractClassRegisteredEvent.fromLog(log));
1887
+ async #updatePublishedContractClasses(allLogs: ContractClassLog[], blockNum: BlockNumber, operation: Operation) {
1888
+ const contractClassPublishedEvents = allLogs
1889
+ .filter(log => ContractClassPublishedEvent.isContractClassPublishedEvent(log))
1890
+ .map(log => ContractClassPublishedEvent.fromLog(log));
919
1891
 
920
- const contractClasses = await Promise.all(contractClassRegisteredEvents.map(e => e.toContractClassPublic()));
1892
+ const contractClasses = await Promise.all(contractClassPublishedEvents.map(e => e.toContractClassPublic()));
921
1893
  if (contractClasses.length > 0) {
922
1894
  contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`));
923
1895
  if (operation == Operation.Store) {
@@ -934,13 +1906,13 @@ class ArchiverStoreHelper
934
1906
  }
935
1907
 
936
1908
  /**
937
- * Extracts and stores contract instances out of ContractInstanceDeployed events emitted by the canonical deployer contract.
1909
+ * Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
938
1910
  * @param allLogs - All logs emitted in a bunch of blocks.
939
1911
  */
940
- async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum: number, operation: Operation) {
1912
+ async #updateDeployedContractInstances(allLogs: PrivateLog[], blockNum: BlockNumber, operation: Operation) {
941
1913
  const contractInstances = allLogs
942
- .filter(log => ContractInstanceDeployedEvent.isContractInstanceDeployedEvent(log))
943
- .map(log => ContractInstanceDeployedEvent.fromLog(log))
1914
+ .filter(log => ContractInstancePublishedEvent.isContractInstancePublishedEvent(log))
1915
+ .map(log => ContractInstancePublishedEvent.fromLog(log))
944
1916
  .map(e => e.toContractInstance());
945
1917
  if (contractInstances.length > 0) {
946
1918
  contractInstances.forEach(c =>
@@ -956,10 +1928,12 @@ class ArchiverStoreHelper
956
1928
  }
957
1929
 
958
1930
  /**
959
- * Extracts and stores contract instances out of ContractInstanceDeployed events emitted by the canonical deployer contract.
1931
+ * Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
960
1932
  * @param allLogs - All logs emitted in a bunch of blocks.
1933
+ * @param timestamp - Timestamp at which the updates were scheduled.
1934
+ * @param operation - The operation to perform on the contract instance updates (Store or Delete).
961
1935
  */
962
- async #updateUpdatedContractInstances(allLogs: PublicLog[], blockNum: number, operation: Operation) {
1936
+ async #updateUpdatedContractInstances(allLogs: PublicLog[], timestamp: UInt64, operation: Operation) {
963
1937
  const contractUpdates = allLogs
964
1938
  .filter(log => ContractInstanceUpdatedEvent.isContractInstanceUpdatedEvent(log))
965
1939
  .map(log => ContractInstanceUpdatedEvent.fromLog(log))
@@ -970,16 +1944,16 @@ class ArchiverStoreHelper
970
1944
  this.#log.verbose(`${Operation[operation]} contract instance update at ${c.address.toString()}`),
971
1945
  );
972
1946
  if (operation == Operation.Store) {
973
- return await this.store.addContractInstanceUpdates(contractUpdates, blockNum);
1947
+ return await this.store.addContractInstanceUpdates(contractUpdates, timestamp);
974
1948
  } else if (operation == Operation.Delete) {
975
- return await this.store.deleteContractInstanceUpdates(contractUpdates, blockNum);
1949
+ return await this.store.deleteContractInstanceUpdates(contractUpdates, timestamp);
976
1950
  }
977
1951
  }
978
1952
  return true;
979
1953
  }
980
1954
 
981
1955
  /**
982
- * Stores the functions that was broadcasted individually
1956
+ * Stores the functions that were broadcasted individually
983
1957
  *
984
1958
  * @dev Beware that there is not a delete variant of this, since they are added to contract classes
985
1959
  * and will be deleted as part of the class if needed.
@@ -988,18 +1962,18 @@ class ArchiverStoreHelper
988
1962
  * @param _blockNum - The block number
989
1963
  * @returns
990
1964
  */
991
- async #storeBroadcastedIndividualFunctions(allLogs: ContractClassLog[], _blockNum: number) {
992
- // Filter out private and unconstrained function broadcast events
1965
+ async #storeBroadcastedIndividualFunctions(allLogs: ContractClassLog[], _blockNum: BlockNumber) {
1966
+ // Filter out private and utility function broadcast events
993
1967
  const privateFnEvents = allLogs
994
1968
  .filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log))
995
1969
  .map(log => PrivateFunctionBroadcastedEvent.fromLog(log));
996
- const unconstrainedFnEvents = allLogs
997
- .filter(log => UnconstrainedFunctionBroadcastedEvent.isUnconstrainedFunctionBroadcastedEvent(log))
998
- .map(log => UnconstrainedFunctionBroadcastedEvent.fromLog(log));
1970
+ const utilityFnEvents = allLogs
1971
+ .filter(log => UtilityFunctionBroadcastedEvent.isUtilityFunctionBroadcastedEvent(log))
1972
+ .map(log => UtilityFunctionBroadcastedEvent.fromLog(log));
999
1973
 
1000
1974
  // Group all events by contract class id
1001
1975
  for (const [classIdString, classEvents] of Object.entries(
1002
- groupBy([...privateFnEvents, ...unconstrainedFnEvents], e => e.contractClassId.toString()),
1976
+ groupBy([...privateFnEvents, ...utilityFnEvents], e => e.contractClassId.toString()),
1003
1977
  )) {
1004
1978
  const contractClassId = Fr.fromHexString(classIdString);
1005
1979
  const contractClass = await this.getContractClass(contractClassId);
@@ -1008,27 +1982,27 @@ class ArchiverStoreHelper
1008
1982
  continue;
1009
1983
  }
1010
1984
 
1011
- // Split private and unconstrained functions, and filter out invalid ones
1985
+ // Split private and utility functions, and filter out invalid ones
1012
1986
  const allFns = classEvents.map(e => e.toFunctionWithMembershipProof());
1013
1987
  const privateFns = allFns.filter(
1014
- (fn): fn is ExecutablePrivateFunctionWithMembershipProof => 'unconstrainedFunctionsArtifactTreeRoot' in fn,
1988
+ (fn): fn is ExecutablePrivateFunctionWithMembershipProof => 'utilityFunctionsTreeRoot' in fn,
1015
1989
  );
1016
- const unconstrainedFns = allFns.filter(
1017
- (fn): fn is UnconstrainedFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn,
1990
+ const utilityFns = allFns.filter(
1991
+ (fn): fn is UtilityFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn,
1018
1992
  );
1019
1993
 
1020
1994
  const privateFunctionsWithValidity = await Promise.all(
1021
1995
  privateFns.map(async fn => ({ fn, valid: await isValidPrivateFunctionMembershipProof(fn, contractClass) })),
1022
1996
  );
1023
1997
  const validPrivateFns = privateFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
1024
- const unconstrainedFunctionsWithValidity = await Promise.all(
1025
- unconstrainedFns.map(async fn => ({
1998
+ const utilityFunctionsWithValidity = await Promise.all(
1999
+ utilityFns.map(async fn => ({
1026
2000
  fn,
1027
- valid: await isValidUnconstrainedFunctionMembershipProof(fn, contractClass),
2001
+ valid: await isValidUtilityFunctionMembershipProof(fn, contractClass),
1028
2002
  })),
1029
2003
  );
1030
- const validUnconstrainedFns = unconstrainedFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
1031
- const validFnCount = validPrivateFns.length + validUnconstrainedFns.length;
2004
+ const validUtilityFns = utilityFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn);
2005
+ const validFnCount = validPrivateFns.length + validUtilityFns.length;
1032
2006
  if (validFnCount !== allFns.length) {
1033
2007
  this.#log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`);
1034
2008
  }
@@ -1037,98 +2011,186 @@ class ArchiverStoreHelper
1037
2011
  if (validFnCount > 0) {
1038
2012
  this.#log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`);
1039
2013
  }
1040
- return await this.store.addFunctions(contractClassId, validPrivateFns, validUnconstrainedFns);
2014
+ return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns);
1041
2015
  }
1042
2016
  return true;
1043
2017
  }
1044
2018
 
1045
- async addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
1046
- const opResults = await Promise.all([
1047
- this.store.addLogs(blocks.map(block => block.data)),
1048
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
1049
- ...blocks.map(async block => {
1050
- const contractClassLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
1051
- // ContractInstanceDeployed event logs are broadcast in privateLogs.
1052
- const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
1053
- const publicLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
1054
- return (
1055
- await Promise.all([
1056
- this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Store),
1057
- this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Store),
1058
- this.#updateUpdatedContractInstances(publicLogs, block.data.number, Operation.Store),
1059
- this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.data.number),
1060
- ])
1061
- ).every(Boolean);
1062
- }),
1063
- this.store.addNullifiers(blocks.map(block => block.data)),
1064
- this.store.addBlocks(blocks),
1065
- ]);
2019
+ private async addBlockDataToDB(block: L2BlockNew) {
2020
+ const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
2021
+ // ContractInstancePublished event logs are broadcast in privateLogs.
2022
+ const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
2023
+ const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
2024
+
2025
+ return (
2026
+ await Promise.all([
2027
+ this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Store),
2028
+ this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Store),
2029
+ this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Store),
2030
+ this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number),
2031
+ ])
2032
+ ).every(Boolean);
2033
+ }
2034
+
2035
+ public addBlocks(blocks: L2BlockNew[], pendingChainValidationStatus?: ValidateCheckpointResult): Promise<boolean> {
2036
+ // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
2037
+ // or if the previous block is not in the store.
2038
+ return this.store.transactionAsync(async () => {
2039
+ await this.store.addBlocks(blocks);
2040
+
2041
+ const opResults = await Promise.all([
2042
+ // Update the pending chain validation status if provided
2043
+ pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
2044
+ // Add any logs emitted during the retrieved blocks
2045
+ this.store.addLogs(blocks),
2046
+ // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
2047
+ ...blocks.map(block => {
2048
+ return this.addBlockDataToDB(block);
2049
+ }),
2050
+ ]);
2051
+
2052
+ return opResults.every(Boolean);
2053
+ });
2054
+ }
1066
2055
 
1067
- return opResults.every(Boolean);
2056
+ public addCheckpoints(
2057
+ checkpoints: PublishedCheckpoint[],
2058
+ pendingChainValidationStatus?: ValidateCheckpointResult,
2059
+ ): Promise<boolean> {
2060
+ // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
2061
+ // or if the previous block is not in the store.
2062
+ return this.store.transactionAsync(async () => {
2063
+ await this.store.addCheckpoints(checkpoints);
2064
+ const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks);
2065
+
2066
+ const opResults = await Promise.all([
2067
+ // Update the pending chain validation status if provided
2068
+ pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
2069
+ // Add any logs emitted during the retrieved blocks
2070
+ this.store.addLogs(allBlocks),
2071
+ // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
2072
+ ...allBlocks.map(block => {
2073
+ return this.addBlockDataToDB(block);
2074
+ }),
2075
+ ]);
2076
+
2077
+ return opResults.every(Boolean);
2078
+ });
1068
2079
  }
1069
2080
 
1070
- async unwindBlocks(from: number, blocksToUnwind: number): Promise<boolean> {
1071
- const last = await this.getSynchedL2BlockNumber();
2081
+ public async unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise<boolean> {
2082
+ if (checkpointsToUnwind <= 0) {
2083
+ throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
2084
+ }
2085
+
2086
+ const last = await this.getSynchedCheckpointNumber();
1072
2087
  if (from != last) {
1073
- throw new Error(`Can only remove from the tip`);
2088
+ throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
1074
2089
  }
1075
2090
 
1076
- // from - blocksToUnwind = the new head, so + 1 for what we need to remove
1077
- const blocks = await this.getBlocks(from - blocksToUnwind + 1, blocksToUnwind);
2091
+ const blocks = [];
2092
+ const lastCheckpointNumber = from + checkpointsToUnwind - 1;
2093
+ for (let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++) {
2094
+ const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
2095
+ if (!blocksForCheckpoint) {
2096
+ continue;
2097
+ }
2098
+ blocks.push(...blocksForCheckpoint);
2099
+ }
1078
2100
 
1079
2101
  const opResults = await Promise.all([
2102
+ // Prune rolls back to the last proven block, which is by definition valid
2103
+ this.store.setPendingChainValidationStatus({ valid: true }),
1080
2104
  // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
1081
2105
  ...blocks.map(async block => {
1082
- const contractClassLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
1083
- // ContractInstanceDeployed event logs are broadcast in privateLogs.
1084
- const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
1085
- const publicLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
2106
+ const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs);
2107
+ // ContractInstancePublished event logs are broadcast in privateLogs.
2108
+ const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs);
2109
+ const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs);
1086
2110
 
1087
2111
  return (
1088
2112
  await Promise.all([
1089
- this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete),
1090
- this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete),
1091
- this.#updateUpdatedContractInstances(publicLogs, block.data.number, Operation.Delete),
2113
+ this.#updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete),
2114
+ this.#updateDeployedContractInstances(privateLogs, block.number, Operation.Delete),
2115
+ this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete),
1092
2116
  ])
1093
2117
  ).every(Boolean);
1094
2118
  }),
1095
2119
 
1096
- this.store.deleteLogs(blocks.map(b => b.data)),
1097
- this.store.unwindBlocks(from, blocksToUnwind),
2120
+ this.store.deleteLogs(blocks),
2121
+ this.store.unwindCheckpoints(from, checkpointsToUnwind),
1098
2122
  ]);
1099
2123
 
1100
2124
  return opResults.every(Boolean);
1101
2125
  }
1102
2126
 
1103
- getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
1104
- return this.store.getBlocks(from, limit);
2127
+ getCheckpointData(checkpointNumber: CheckpointNumber): Promise<CheckpointData | undefined> {
2128
+ return this.store.getCheckpointData(checkpointNumber);
2129
+ }
2130
+
2131
+ getRangeOfCheckpoints(from: CheckpointNumber, limit: number): Promise<CheckpointData[]> {
2132
+ return this.store.getRangeOfCheckpoints(from, limit);
2133
+ }
2134
+
2135
+ getCheckpointedL2BlockNumber(): Promise<BlockNumber> {
2136
+ return this.store.getCheckpointedL2BlockNumber();
2137
+ }
2138
+ getSynchedCheckpointNumber(): Promise<CheckpointNumber> {
2139
+ return this.store.getSynchedCheckpointNumber();
1105
2140
  }
1106
- getBlockHeaders(from: number, limit: number): Promise<BlockHeader[]> {
2141
+ setCheckpointSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
2142
+ return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
2143
+ }
2144
+ getCheckpointedBlock(number: BlockNumber): Promise<CheckpointedL2Block | undefined> {
2145
+ return this.store.getCheckpointedBlock(number);
2146
+ }
2147
+ getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
2148
+ return this.store.getCheckpointedBlockByHash(blockHash);
2149
+ }
2150
+ getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
2151
+ return this.store.getCheckpointedBlockByArchive(archive);
2152
+ }
2153
+ getBlockHeaders(from: BlockNumber, limit: number): Promise<BlockHeader[]> {
1107
2154
  return this.store.getBlockHeaders(from, limit);
1108
2155
  }
1109
- getTxEffect(txHash: TxHash): Promise<InBlock<TxEffect> | undefined> {
2156
+ getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
2157
+ return this.store.getBlockHeaderByHash(blockHash);
2158
+ }
2159
+ getBlockHeaderByArchive(archive: Fr): Promise<BlockHeader | undefined> {
2160
+ return this.store.getBlockHeaderByArchive(archive);
2161
+ }
2162
+ getBlockByHash(blockHash: Fr): Promise<L2BlockNew | undefined> {
2163
+ return this.store.getBlockByHash(blockHash);
2164
+ }
2165
+ getBlockByArchive(archive: Fr): Promise<L2BlockNew | undefined> {
2166
+ return this.store.getBlockByArchive(archive);
2167
+ }
2168
+ getLatestBlockNumber(): Promise<BlockNumber> {
2169
+ return this.store.getLatestBlockNumber();
2170
+ }
2171
+ getBlocksForCheckpoint(checkpointNumber: CheckpointNumber): Promise<L2BlockNew[] | undefined> {
2172
+ return this.store.getBlocksForCheckpoint(checkpointNumber);
2173
+ }
2174
+ getTxEffect(txHash: TxHash): Promise<IndexedTxEffect | undefined> {
1110
2175
  return this.store.getTxEffect(txHash);
1111
2176
  }
1112
2177
  getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
1113
2178
  return this.store.getSettledTxReceipt(txHash);
1114
2179
  }
1115
- addL1ToL2Messages(messages: DataRetrieval<InboxLeaf>): Promise<boolean> {
2180
+ addL1ToL2Messages(messages: InboxMessage[]): Promise<void> {
1116
2181
  return this.store.addL1ToL2Messages(messages);
1117
2182
  }
1118
- getL1ToL2Messages(blockNumber: bigint): Promise<Fr[]> {
1119
- return this.store.getL1ToL2Messages(blockNumber);
2183
+ getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
2184
+ return this.store.getL1ToL2Messages(checkpointNumber);
1120
2185
  }
1121
2186
  getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
1122
2187
  return this.store.getL1ToL2MessageIndex(l1ToL2Message);
1123
2188
  }
1124
- getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]> {
1125
- return this.store.getPrivateLogs(from, limit);
1126
- }
1127
- getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
1128
- return this.store.getLogsByTags(tags);
2189
+ getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
2190
+ return this.store.getPrivateLogsByTags(tags);
1129
2191
  }
1130
- findNullifiersIndexesWithBlock(blockNumber: number, nullifiers: Fr[]): Promise<(InBlock<bigint> | undefined)[]> {
1131
- return this.store.findNullifiersIndexesWithBlock(blockNumber, nullifiers);
2192
+ getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
2193
+ return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
1132
2194
  }
1133
2195
  getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
1134
2196
  return this.store.getPublicLogs(filter);
@@ -1136,20 +2198,23 @@ class ArchiverStoreHelper
1136
2198
  getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
1137
2199
  return this.store.getContractClassLogs(filter);
1138
2200
  }
1139
- getSynchedL2BlockNumber(): Promise<number> {
1140
- return this.store.getSynchedL2BlockNumber();
2201
+ getSynchedL2BlockNumber(): Promise<BlockNumber> {
2202
+ return this.store.getLatestBlockNumber();
2203
+ }
2204
+ getProvenCheckpointNumber(): Promise<CheckpointNumber> {
2205
+ return this.store.getProvenCheckpointNumber();
1141
2206
  }
1142
- getProvenL2BlockNumber(): Promise<number> {
1143
- return this.store.getProvenL2BlockNumber();
2207
+ getProvenBlockNumber(): Promise<BlockNumber> {
2208
+ return this.store.getProvenBlockNumber();
1144
2209
  }
1145
- setProvenL2BlockNumber(l2BlockNumber: number): Promise<void> {
1146
- return this.store.setProvenL2BlockNumber(l2BlockNumber);
2210
+ setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise<void> {
2211
+ return this.store.setProvenCheckpointNumber(checkpointNumber);
1147
2212
  }
1148
2213
  setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
1149
- return this.store.setBlockSynchedL1BlockNumber(l1BlockNumber);
2214
+ return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
1150
2215
  }
1151
- setMessageSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void> {
1152
- return this.store.setMessageSynchedL1BlockNumber(l1BlockNumber);
2216
+ setMessageSynchedL1Block(l1Block: L1BlockId): Promise<void> {
2217
+ return this.store.setMessageSynchedL1Block(l1Block);
1153
2218
  }
1154
2219
  getSynchPoint(): Promise<ArchiverL1SynchPoint> {
1155
2220
  return this.store.getSynchPoint();
@@ -1160,22 +2225,41 @@ class ArchiverStoreHelper
1160
2225
  getBytecodeCommitment(contractClassId: Fr): Promise<Fr | undefined> {
1161
2226
  return this.store.getBytecodeCommitment(contractClassId);
1162
2227
  }
1163
- getContractInstance(address: AztecAddress): Promise<ContractInstanceWithAddress | undefined> {
1164
- return this.store.getContractInstance(address);
2228
+ getContractInstance(address: AztecAddress, timestamp: UInt64): Promise<ContractInstanceWithAddress | undefined> {
2229
+ return this.store.getContractInstance(address, timestamp);
1165
2230
  }
1166
2231
  getContractClassIds(): Promise<Fr[]> {
1167
2232
  return this.store.getContractClassIds();
1168
2233
  }
1169
- registerContractFunctionSignatures(address: AztecAddress, signatures: string[]): Promise<void> {
1170
- return this.store.registerContractFunctionSignatures(address, signatures);
2234
+ registerContractFunctionSignatures(signatures: string[]): Promise<void> {
2235
+ return this.store.registerContractFunctionSignatures(signatures);
1171
2236
  }
1172
- getContractFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
1173
- return this.store.getContractFunctionName(address, selector);
2237
+ getDebugFunctionName(address: AztecAddress, selector: FunctionSelector): Promise<string | undefined> {
2238
+ return this.store.getDebugFunctionName(address, selector);
1174
2239
  }
1175
2240
  getTotalL1ToL2MessageCount(): Promise<bigint> {
1176
2241
  return this.store.getTotalL1ToL2MessageCount();
1177
2242
  }
1178
- estimateSize(): Promise<{ mappingSize: number; actualSize: number; numItems: number }> {
2243
+ estimateSize(): Promise<{ mappingSize: number; physicalFileSize: number; actualSize: number; numItems: number }> {
1179
2244
  return this.store.estimateSize();
1180
2245
  }
2246
+ rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber: CheckpointNumber): Promise<void> {
2247
+ return this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
2248
+ }
2249
+ iterateL1ToL2Messages(range: CustomRange<bigint> = {}): AsyncIterableIterator<InboxMessage> {
2250
+ return this.store.iterateL1ToL2Messages(range);
2251
+ }
2252
+ removeL1ToL2Messages(startIndex: bigint): Promise<void> {
2253
+ return this.store.removeL1ToL2Messages(startIndex);
2254
+ }
2255
+ getLastL1ToL2Message(): Promise<InboxMessage | undefined> {
2256
+ return this.store.getLastL1ToL2Message();
2257
+ }
2258
+ getPendingChainValidationStatus(): Promise<ValidateCheckpointResult | undefined> {
2259
+ return this.store.getPendingChainValidationStatus();
2260
+ }
2261
+ setPendingChainValidationStatus(status: ValidateCheckpointResult | undefined): Promise<void> {
2262
+ this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
2263
+ return this.store.setPendingChainValidationStatus(status);
2264
+ }
1181
2265
  }