@aztec/archiver 3.0.0-nightly.20251127 → 3.0.0-nightly.20251128

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dest/archiver/archiver.d.ts +24 -17
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +218 -160
  4. package/dest/archiver/archiver_store.d.ts +1 -1
  5. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +5 -4
  8. package/dest/archiver/config.d.ts +1 -1
  9. package/dest/archiver/data_retrieval.d.ts +15 -13
  10. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  11. package/dest/archiver/data_retrieval.js +56 -55
  12. package/dest/archiver/errors.d.ts +1 -1
  13. package/dest/archiver/errors.d.ts.map +1 -1
  14. package/dest/archiver/index.d.ts +1 -1
  15. package/dest/archiver/instrumentation.d.ts +3 -3
  16. package/dest/archiver/instrumentation.d.ts.map +1 -1
  17. package/dest/archiver/kv_archiver_store/block_store.d.ts +1 -1
  18. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +1 -1
  20. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  21. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +1 -1
  22. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +2 -2
  24. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  25. package/dest/archiver/kv_archiver_store/log_store.d.ts +1 -1
  26. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  27. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -1
  28. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  29. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  30. package/dest/archiver/structs/inbox_message.d.ts +1 -1
  31. package/dest/archiver/structs/published.d.ts +3 -2
  32. package/dest/archiver/structs/published.d.ts.map +1 -1
  33. package/dest/archiver/validation.d.ts +10 -4
  34. package/dest/archiver/validation.d.ts.map +1 -1
  35. package/dest/archiver/validation.js +25 -17
  36. package/dest/factory.d.ts +1 -1
  37. package/dest/index.d.ts +2 -2
  38. package/dest/index.d.ts.map +1 -1
  39. package/dest/index.js +1 -1
  40. package/dest/rpc/index.d.ts +2 -2
  41. package/dest/test/index.d.ts +1 -1
  42. package/dest/test/mock_archiver.d.ts +1 -1
  43. package/dest/test/mock_archiver.d.ts.map +1 -1
  44. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  45. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  46. package/dest/test/mock_l2_block_source.d.ts +6 -5
  47. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  48. package/dest/test/mock_structs.d.ts +1 -1
  49. package/package.json +16 -16
  50. package/src/archiver/archiver.ts +290 -192
  51. package/src/archiver/archiver_store_test_suite.ts +5 -4
  52. package/src/archiver/data_retrieval.ts +71 -68
  53. package/src/archiver/instrumentation.ts +2 -2
  54. package/src/archiver/structs/published.ts +2 -1
  55. package/src/archiver/validation.ts +40 -19
  56. package/src/index.ts +1 -1
  57. package/src/test/mock_l2_block_source.ts +5 -4
@@ -4,6 +4,7 @@ import {
4
4
  PRIVATE_LOG_SIZE_IN_FIELDS,
5
5
  } from '@aztec/constants';
6
6
  import { makeTuple } from '@aztec/foundation/array';
7
+ import { EpochNumber } from '@aztec/foundation/branded-types';
7
8
  import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
8
9
  import { times, timesParallel } from '@aztec/foundation/collection';
9
10
  import { randomInt } from '@aztec/foundation/crypto';
@@ -1189,7 +1190,7 @@ export function describeArchiverDataStore(
1189
1190
  valid: false,
1190
1191
  block: randomBlockInfo(1),
1191
1192
  committee: [EthAddress.random(), EthAddress.random()],
1192
- epoch: 123n,
1193
+ epoch: EpochNumber(123),
1193
1194
  seed: 456n,
1194
1195
  attestors: [EthAddress.random()],
1195
1196
  attestations: [CommitteeAttestation.random()],
@@ -1208,7 +1209,7 @@ export function describeArchiverDataStore(
1208
1209
  block: randomBlockInfo(2),
1209
1210
  committee: [EthAddress.random()],
1210
1211
  attestors: [EthAddress.random()],
1211
- epoch: 789n,
1212
+ epoch: EpochNumber(789),
1212
1213
  seed: 101n,
1213
1214
  attestations: [CommitteeAttestation.random()],
1214
1215
  reason: 'invalid-attestation',
@@ -1227,7 +1228,7 @@ export function describeArchiverDataStore(
1227
1228
  valid: false,
1228
1229
  block: randomBlockInfo(3),
1229
1230
  committee: [EthAddress.random()],
1230
- epoch: 999n,
1231
+ epoch: EpochNumber(999),
1231
1232
  seed: 888n,
1232
1233
  attestors: [EthAddress.random()],
1233
1234
  attestations: [CommitteeAttestation.random()],
@@ -1246,7 +1247,7 @@ export function describeArchiverDataStore(
1246
1247
  valid: false,
1247
1248
  block: randomBlockInfo(4),
1248
1249
  committee: [],
1249
- epoch: 0n,
1250
+ epoch: EpochNumber(0),
1250
1251
  seed: 0n,
1251
1252
  attestors: [],
1252
1253
  attestations: [],
@@ -21,7 +21,8 @@ import type { ViemSignature } from '@aztec/foundation/eth-signature';
21
21
  import { Fr } from '@aztec/foundation/fields';
22
22
  import { type Logger, createLogger } from '@aztec/foundation/log';
23
23
  import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
24
- import { Body, CommitteeAttestation, L2Block, L2BlockHeader, PublishedL2Block } from '@aztec/stdlib/block';
24
+ import { Body, CommitteeAttestation, L2BlockNew } from '@aztec/stdlib/block';
25
+ import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
25
26
  import { Proof } from '@aztec/stdlib/proofs';
26
27
  import { CheckpointHeader } from '@aztec/stdlib/rollup';
27
28
  import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
@@ -42,7 +43,8 @@ import type { DataRetrieval } from './structs/data_retrieval.js';
42
43
  import type { InboxMessage } from './structs/inbox_message.js';
43
44
  import type { L1PublishedData } from './structs/published.js';
44
45
 
45
- export type RetrievedL2Block = {
46
+ export type RetrievedCheckpoint = {
47
+ checkpointNumber: number;
46
48
  archiveRoot: Fr;
47
49
  stateReference: StateReference;
48
50
  header: CheckpointHeader;
@@ -53,7 +55,8 @@ export type RetrievedL2Block = {
53
55
  attestations: CommitteeAttestation[];
54
56
  };
55
57
 
56
- export async function retrievedBlockToPublishedL2Block({
58
+ export async function retrievedToPublishedCheckpoint({
59
+ checkpointNumber,
57
60
  archiveRoot,
58
61
  stateReference,
59
62
  header: checkpointHeader,
@@ -62,7 +65,7 @@ export async function retrievedBlockToPublishedL2Block({
62
65
  chainId,
63
66
  version,
64
67
  attestations,
65
- }: RetrievedL2Block): Promise<PublishedL2Block> {
68
+ }: RetrievedCheckpoint): Promise<PublishedCheckpoint> {
66
69
  const { blocks: blocksBlobData } = checkpointBlobData;
67
70
 
68
71
  // The lastArchiveRoot of a block is the new archive for the previous block.
@@ -76,7 +79,7 @@ export async function retrievedBlockToPublishedL2Block({
76
79
  const l1toL2MessageTreeRoot = blocksBlobData[0].l1ToL2MessageRoot!;
77
80
 
78
81
  const spongeBlob = SpongeBlob.init();
79
- const l2Blocks: L2Block[] = [];
82
+ const l2Blocks: L2BlockNew[] = [];
80
83
  for (let i = 0; i < blocksBlobData.length; i++) {
81
84
  const blockBlobData = blocksBlobData[i];
82
85
  const { blockEndMarker, blockEndStateField, lastArchiveRoot, noteHashRoot, nullifierRoot, publicDataRoot } =
@@ -115,7 +118,7 @@ export async function retrievedBlockToPublishedL2Block({
115
118
  const clonedSpongeBlob = spongeBlob.clone();
116
119
  const spongeBlobHash = await clonedSpongeBlob.squeeze();
117
120
 
118
- const blockHeader = BlockHeader.from({
121
+ const header = BlockHeader.from({
119
122
  lastArchive: new AppendOnlyTreeSnapshot(lastArchiveRoot, l2BlockNumber),
120
123
  state,
121
124
  spongeBlobHash,
@@ -124,31 +127,30 @@ export async function retrievedBlockToPublishedL2Block({
124
127
  totalManaUsed: new Fr(blockEndStateField.totalManaUsed),
125
128
  });
126
129
 
127
- const header = L2BlockHeader.from({
128
- ...blockHeader,
129
- blockHeadersHash: checkpointHeader.blockHeadersHash,
130
- contentCommitment: checkpointHeader.contentCommitment,
131
- });
132
-
133
130
  const newArchive = new AppendOnlyTreeSnapshot(newArchiveRoots[i], l2BlockNumber + 1);
134
131
 
135
- l2Blocks.push(new L2Block(newArchive, header, body));
132
+ l2Blocks.push(new L2BlockNew(newArchive, header, body));
136
133
  }
137
134
 
138
- const lastBlock = l2Blocks[l2Blocks.length - 1];
135
+ const lastBlock = l2Blocks.at(-1)!;
139
136
  if (!lastBlock.header.state.equals(stateReference)) {
140
137
  throw new Error(
141
138
  'The claimed state reference submitted to L1 does not match the state reference of the last block.',
142
139
  );
143
140
  }
144
141
 
145
- // TODO(#17027)
146
- // There's only one block per checkpoint at the moment.
147
- return PublishedL2Block.fromFields({ block: l2Blocks[0], l1, attestations });
142
+ const checkpoint = Checkpoint.from({
143
+ archive: new AppendOnlyTreeSnapshot(archiveRoot, lastBlock.number + 1),
144
+ header: checkpointHeader,
145
+ blocks: l2Blocks,
146
+ number: checkpointNumber,
147
+ });
148
+
149
+ return PublishedCheckpoint.from({ checkpoint, l1, attestations });
148
150
  }
149
151
 
150
152
  /**
151
- * Fetches new L2 blocks.
153
+ * Fetches new checkpoints.
152
154
  * @param publicClient - The viem public client to use for transaction retrieval.
153
155
  * @param rollupAddress - The address of the rollup contract.
154
156
  * @param searchStartBlock - The block number to use for starting the search.
@@ -156,15 +158,15 @@ export async function retrievedBlockToPublishedL2Block({
156
158
  * @param expectedNextL2BlockNum - The next L2 block number that we expect to find.
157
159
  * @returns An array of blocks, as well as the next eth block to search from.
158
160
  */
159
- export async function retrieveBlocksFromRollup(
161
+ export async function retrieveCheckpointsFromRollup(
160
162
  rollup: GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
161
163
  publicClient: ViemPublicClient,
162
164
  blobSinkClient: BlobSinkClientInterface,
163
165
  searchStartBlock: bigint,
164
166
  searchEndBlock: bigint,
165
167
  logger: Logger = createLogger('archiver'),
166
- ): Promise<RetrievedL2Block[]> {
167
- const retrievedBlocks: RetrievedL2Block[] = [];
168
+ ): Promise<RetrievedCheckpoint[]> {
169
+ const retrievedCheckpoints: RetrievedCheckpoint[] = [];
168
170
 
169
171
  let rollupConstants: { chainId: Fr; version: Fr; targetCommitteeSize: number } | undefined;
170
172
 
@@ -172,7 +174,7 @@ export async function retrieveBlocksFromRollup(
172
174
  if (searchStartBlock > searchEndBlock) {
173
175
  break;
174
176
  }
175
- const l2BlockProposedLogs = (
177
+ const checkpointProposedLogs = (
176
178
  await rollup.getEvents.CheckpointProposed(
177
179
  {},
178
180
  {
@@ -182,13 +184,13 @@ export async function retrieveBlocksFromRollup(
182
184
  )
183
185
  ).filter(log => log.blockNumber! >= searchStartBlock && log.blockNumber! <= searchEndBlock);
184
186
 
185
- if (l2BlockProposedLogs.length === 0) {
187
+ if (checkpointProposedLogs.length === 0) {
186
188
  break;
187
189
  }
188
190
 
189
- const lastLog = l2BlockProposedLogs[l2BlockProposedLogs.length - 1];
191
+ const lastLog = checkpointProposedLogs.at(-1)!;
190
192
  logger.debug(
191
- `Got ${l2BlockProposedLogs.length} L2 block processed logs for L2 blocks ${l2BlockProposedLogs[0].args.checkpointNumber}-${lastLog.args.checkpointNumber} between L1 blocks ${searchStartBlock}-${searchEndBlock}`,
193
+ `Got ${checkpointProposedLogs.length} processed logs for checkpoints ${checkpointProposedLogs[0].args.checkpointNumber}-${lastLog.args.checkpointNumber} between L1 blocks ${searchStartBlock}-${searchEndBlock}`,
192
194
  );
193
195
 
194
196
  if (rollupConstants === undefined) {
@@ -204,52 +206,52 @@ export async function retrieveBlocksFromRollup(
204
206
  };
205
207
  }
206
208
 
207
- const newBlocks = await processL2BlockProposedLogs(
209
+ const newCheckpoints = await processCheckpointProposedLogs(
208
210
  rollup,
209
211
  publicClient,
210
212
  blobSinkClient,
211
- l2BlockProposedLogs,
213
+ checkpointProposedLogs,
212
214
  rollupConstants,
213
215
  logger,
214
216
  );
215
- retrievedBlocks.push(...newBlocks);
217
+ retrievedCheckpoints.push(...newCheckpoints);
216
218
  searchStartBlock = lastLog.blockNumber! + 1n;
217
219
  } while (searchStartBlock <= searchEndBlock);
218
220
 
219
- // The asyncpool from processL2BlockProposedLogs will not necessarily return the blocks in order, so we sort them before returning.
220
- return retrievedBlocks.sort((a, b) => Number(a.l1.blockNumber - b.l1.blockNumber));
221
+ // The asyncPool from processCheckpointProposedLogs will not necessarily return the checkpoints in order, so we sort them before returning.
222
+ return retrievedCheckpoints.sort((a, b) => Number(a.l1.blockNumber - b.l1.blockNumber));
221
223
  }
222
224
 
223
225
  /**
224
- * Processes newly received L2BlockProposed logs.
226
+ * Processes newly received CheckpointProposed logs.
225
227
  * @param rollup - The rollup contract
226
228
  * @param publicClient - The viem public client to use for transaction retrieval.
227
- * @param logs - L2BlockProposed logs.
228
- * @returns - An array blocks.
229
+ * @param logs - CheckpointProposed logs.
230
+ * @returns - An array of checkpoints.
229
231
  */
230
- async function processL2BlockProposedLogs(
232
+ async function processCheckpointProposedLogs(
231
233
  rollup: GetContractReturnType<typeof RollupAbi, ViemPublicClient>,
232
234
  publicClient: ViemPublicClient,
233
235
  blobSinkClient: BlobSinkClientInterface,
234
236
  logs: GetContractEventsReturnType<typeof RollupAbi, 'CheckpointProposed'>,
235
237
  { chainId, version, targetCommitteeSize }: { chainId: Fr; version: Fr; targetCommitteeSize: number },
236
238
  logger: Logger,
237
- ): Promise<RetrievedL2Block[]> {
238
- const retrievedBlocks: RetrievedL2Block[] = [];
239
+ ): Promise<RetrievedCheckpoint[]> {
240
+ const retrievedCheckpoints: RetrievedCheckpoint[] = [];
239
241
  await asyncPool(10, logs, async log => {
240
- const l2BlockNumber = Number(log.args.checkpointNumber!);
242
+ const checkpointNumber = Number(log.args.checkpointNumber!);
241
243
  const archive = log.args.archive!;
242
- const archiveFromChain = await rollup.read.archiveAt([BigInt(l2BlockNumber)]);
244
+ const archiveFromChain = await rollup.read.archiveAt([BigInt(checkpointNumber)]);
243
245
  const blobHashes = log.args.versionedBlobHashes!.map(blobHash => Buffer.from(blobHash.slice(2), 'hex'));
244
246
 
245
- // The value from the event and contract will match only if the block is in the chain.
247
+ // The value from the event and contract will match only if the checkpoint is in the chain.
246
248
  if (archive === archiveFromChain) {
247
- const block = await getBlockFromRollupTx(
249
+ const checkpoint = await getCheckpointFromRollupTx(
248
250
  publicClient,
249
251
  blobSinkClient,
250
252
  log.transactionHash!,
251
253
  blobHashes,
252
- l2BlockNumber,
254
+ checkpointNumber,
253
255
  rollup.address,
254
256
  targetCommitteeSize,
255
257
  logger,
@@ -261,22 +263,22 @@ async function processL2BlockProposedLogs(
261
263
  timestamp: await getL1BlockTime(publicClient, log.blockNumber),
262
264
  };
263
265
 
264
- retrievedBlocks.push({ ...block, l1, chainId, version });
265
- logger.trace(`Retrieved L2 block ${l2BlockNumber} from L1 tx ${log.transactionHash}`, {
266
+ retrievedCheckpoints.push({ ...checkpoint, l1, chainId, version });
267
+ logger.trace(`Retrieved checkpoint ${checkpointNumber} from L1 tx ${log.transactionHash}`, {
266
268
  l1BlockNumber: log.blockNumber,
267
- l2BlockNumber,
269
+ checkpointNumber,
268
270
  archive: archive.toString(),
269
- attestations: block.attestations,
271
+ attestations: checkpoint.attestations,
270
272
  });
271
273
  } else {
272
- logger.warn(`Ignoring L2 block ${l2BlockNumber} due to archive root mismatch`, {
274
+ logger.warn(`Ignoring checkpoint ${checkpointNumber} due to archive root mismatch`, {
273
275
  actual: archive,
274
276
  expected: archiveFromChain,
275
277
  });
276
278
  }
277
279
  });
278
280
 
279
- return retrievedBlocks;
281
+ return retrievedCheckpoints;
280
282
  }
281
283
 
282
284
  export async function getL1BlockTime(publicClient: ViemPublicClient, blockNumber: bigint): Promise<bigint> {
@@ -335,25 +337,25 @@ function extractRollupProposeCalldata(multicall3Data: Hex, rollupAddress: Hex):
335
337
  }
336
338
 
337
339
  /**
338
- * Gets block from the calldata of an L1 transaction.
339
- * Assumes that the block was published from an EOA.
340
+ * Gets checkpoint from the calldata of an L1 transaction.
341
+ * Assumes that the checkpoint was published from an EOA.
340
342
  * TODO: Add retries and error management.
341
343
  * @param publicClient - The viem public client to use for transaction retrieval.
342
344
  * @param txHash - Hash of the tx that published it.
343
- * @param l2BlockNumber - L2 block number.
344
- * @returns L2 block from the calldata, deserialized
345
+ * @param checkpointNumber - Checkpoint number.
346
+ * @returns Checkpoint from the calldata, deserialized
345
347
  */
346
- async function getBlockFromRollupTx(
348
+ async function getCheckpointFromRollupTx(
347
349
  publicClient: ViemPublicClient,
348
350
  blobSinkClient: BlobSinkClientInterface,
349
351
  txHash: `0x${string}`,
350
352
  blobHashes: Buffer[], // TODO(md): buffer32?
351
- l2BlockNumber: number,
353
+ checkpointNumber: number,
352
354
  rollupAddress: Hex,
353
355
  targetCommitteeSize: number,
354
356
  logger: Logger,
355
- ): Promise<Omit<RetrievedL2Block, 'l1' | 'chainId' | 'version'>> {
356
- logger.trace(`Fetching L2 block ${l2BlockNumber} from rollup tx ${txHash}`);
357
+ ): Promise<Omit<RetrievedCheckpoint, 'l1' | 'chainId' | 'version'>> {
358
+ logger.trace(`Fetching checkpoint ${checkpointNumber} from rollup tx ${txHash}`);
357
359
  const { input: forwarderData, blockHash } = await publicClient.getTransaction({ hash: txHash });
358
360
 
359
361
  const rollupData = extractRollupProposeCalldata(forwarderData, rollupAddress);
@@ -385,7 +387,7 @@ async function getBlockFromRollupTx(
385
387
  const attestations = CommitteeAttestation.fromPacked(packedAttestations, targetCommitteeSize);
386
388
 
387
389
  logger.trace(`Recovered propose calldata from tx ${txHash}`, {
388
- l2BlockNumber,
390
+ checkpointNumber,
389
391
  archive: decodedArgs.archive,
390
392
  stateReference: decodedArgs.stateReference,
391
393
  header: decodedArgs.header,
@@ -399,7 +401,7 @@ async function getBlockFromRollupTx(
399
401
  const header = CheckpointHeader.fromViem(decodedArgs.header);
400
402
  const blobBodies = await blobSinkClient.getBlobSidecar(blockHash, blobHashes);
401
403
  if (blobBodies.length === 0) {
402
- throw new NoBlobBodiesFoundError(l2BlockNumber);
404
+ throw new NoBlobBodiesFoundError(checkpointNumber);
403
405
  }
404
406
 
405
407
  let checkpointBlobData: CheckpointBlobData;
@@ -420,6 +422,7 @@ async function getBlockFromRollupTx(
420
422
  const stateReference = StateReference.fromViem(decodedArgs.stateReference);
421
423
 
422
424
  return {
425
+ checkpointNumber,
423
426
  archiveRoot,
424
427
  stateReference,
425
428
  header,
@@ -492,7 +495,7 @@ export async function retrieveL2ProofVerifiedEvents(
492
495
  rollupAddress: EthAddress,
493
496
  searchStartBlock: bigint,
494
497
  searchEndBlock?: bigint,
495
- ): Promise<{ l1BlockNumber: bigint; l2BlockNumber: number; proverId: Fr; txHash: Hex }[]> {
498
+ ): Promise<{ l1BlockNumber: bigint; checkpointNumber: number; proverId: Fr; txHash: Hex }[]> {
496
499
  const logs = await publicClient.getLogs({
497
500
  address: rollupAddress.toString(),
498
501
  fromBlock: searchStartBlock,
@@ -503,7 +506,7 @@ export async function retrieveL2ProofVerifiedEvents(
503
506
 
504
507
  return logs.map(log => ({
505
508
  l1BlockNumber: log.blockNumber,
506
- l2BlockNumber: Number(log.args.checkpointNumber),
509
+ checkpointNumber: Number(log.args.checkpointNumber),
507
510
  proverId: Fr.fromHexString(log.args.proverId),
508
511
  txHash: log.transactionHash,
509
512
  }));
@@ -515,14 +518,14 @@ export async function retrieveL2ProofsFromRollup(
515
518
  rollupAddress: EthAddress,
516
519
  searchStartBlock: bigint,
517
520
  searchEndBlock?: bigint,
518
- ): Promise<DataRetrieval<{ proof: Proof; proverId: Fr; l2BlockNumber: number; txHash: `0x${string}` }>> {
521
+ ): Promise<DataRetrieval<{ proof: Proof; proverId: Fr; checkpointNumber: number; txHash: `0x${string}` }>> {
519
522
  const logs = await retrieveL2ProofVerifiedEvents(publicClient, rollupAddress, searchStartBlock, searchEndBlock);
520
- const retrievedData: { proof: Proof; proverId: Fr; l2BlockNumber: number; txHash: `0x${string}` }[] = [];
523
+ const retrievedData: { proof: Proof; proverId: Fr; checkpointNumber: number; txHash: `0x${string}` }[] = [];
521
524
  const lastProcessedL1BlockNumber = logs.length > 0 ? logs.at(-1)!.l1BlockNumber : searchStartBlock - 1n;
522
525
 
523
- for (const { txHash, proverId, l2BlockNumber } of logs) {
526
+ for (const { txHash, proverId, checkpointNumber } of logs) {
524
527
  const proofData = await getProofFromSubmitProofTx(publicClient, txHash, proverId);
525
- retrievedData.push({ proof: proofData.proof, proverId: proofData.proverId, l2BlockNumber, txHash });
528
+ retrievedData.push({ proof: proofData.proof, proverId: proofData.proverId, checkpointNumber, txHash });
526
529
  }
527
530
  return {
528
531
  retrievedData,
@@ -530,26 +533,26 @@ export async function retrieveL2ProofsFromRollup(
530
533
  };
531
534
  }
532
535
 
533
- export type SubmitBlockProof = {
536
+ export type SubmitEpochProof = {
534
537
  archiveRoot: Fr;
535
538
  proverId: Fr;
536
539
  proof: Proof;
537
540
  };
538
541
 
539
542
  /**
540
- * Gets block metadata (header and archive snapshot) from the calldata of an L1 transaction.
543
+ * Gets epoch proof metadata (archive root and proof) from the calldata of an L1 transaction.
541
544
  * Assumes that the block was published from an EOA.
542
545
  * TODO: Add retries and error management.
543
546
  * @param publicClient - The viem public client to use for transaction retrieval.
544
547
  * @param txHash - Hash of the tx that published it.
545
- * @param l2BlockNum - L2 block number.
546
- * @returns L2 block metadata (header and archive) from the calldata, deserialized
548
+ * @param expectedProverId - Expected prover ID.
549
+ * @returns Epoch proof metadata from the calldata, deserialized.
547
550
  */
548
551
  export async function getProofFromSubmitProofTx(
549
552
  publicClient: ViemPublicClient,
550
553
  txHash: `0x${string}`,
551
554
  expectedProverId: Fr,
552
- ): Promise<SubmitBlockProof> {
555
+ ): Promise<SubmitEpochProof> {
553
556
  const { input: data } = await publicClient.getTransaction({ hash: txHash });
554
557
  const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data });
555
558
 
@@ -1,5 +1,5 @@
1
1
  import { createLogger } from '@aztec/foundation/log';
2
- import type { L2Block } from '@aztec/stdlib/block';
2
+ import type { L2BlockNew } from '@aztec/stdlib/block';
3
3
  import {
4
4
  Attributes,
5
5
  type Gauge,
@@ -139,7 +139,7 @@ export class ArchiverInstrumentation {
139
139
  return this.telemetry.isEnabled();
140
140
  }
141
141
 
142
- public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) {
142
+ public processNewBlocks(syncTimePerBlock: number, blocks: L2BlockNew[]) {
143
143
  this.syncDurationPerBlock.record(Math.ceil(syncTimePerBlock));
144
144
  this.blockHeight.record(Math.max(...blocks.map(b => b.number)));
145
145
  this.syncBlockCount.add(blocks.length);
@@ -1 +1,2 @@
1
- export type { PublishedL2Block, L1PublishedData } from '@aztec/stdlib/block';
1
+ export type { PublishedL2Block } from '@aztec/stdlib/block';
2
+ export type { L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
@@ -1,45 +1,63 @@
1
1
  import type { EpochCache } from '@aztec/epoch-cache';
2
+ import { EpochNumber } from '@aztec/foundation/branded-types';
2
3
  import { compactArray } from '@aztec/foundation/collection';
3
4
  import type { Logger } from '@aztec/foundation/log';
4
5
  import {
5
- type PublishedL2Block,
6
+ type AttestationInfo,
6
7
  type ValidateBlockNegativeResult,
7
8
  type ValidateBlockResult,
8
- getAttestationInfoFromPublishedL2Block,
9
+ getAttestationInfoFromPayload,
9
10
  } from '@aztec/stdlib/block';
11
+ import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
10
12
  import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
13
+ import { ConsensusPayload } from '@aztec/stdlib/p2p';
11
14
 
12
15
  export type { ValidateBlockResult };
13
16
 
14
17
  /**
15
- * Validates the attestations submitted for the given block.
18
+ * Extracts attestation information from a published checkpoint.
19
+ * Returns info for each attestation, preserving array indices.
20
+ */
21
+ export function getAttestationInfoFromPublishedCheckpoint({
22
+ checkpoint,
23
+ attestations,
24
+ }: PublishedCheckpoint): AttestationInfo[] {
25
+ const payload = ConsensusPayload.fromCheckpoint(checkpoint);
26
+ return getAttestationInfoFromPayload(payload, attestations);
27
+ }
28
+
29
+ /**
30
+ * Validates the attestations submitted for the given checkpoint.
16
31
  * Returns true if the attestations are valid and sufficient, false otherwise.
17
32
  */
18
- export async function validateBlockAttestations(
19
- publishedBlock: PublishedL2Block,
33
+ export async function validateCheckpointAttestations(
34
+ publishedCheckpoint: PublishedCheckpoint,
20
35
  epochCache: EpochCache,
21
36
  constants: Pick<L1RollupConstants, 'epochDuration'>,
22
37
  logger?: Logger,
23
38
  ): Promise<ValidateBlockResult> {
24
- const attestorInfos = getAttestationInfoFromPublishedL2Block(publishedBlock);
39
+ const attestorInfos = getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint);
25
40
  const attestors = compactArray(attestorInfos.map(info => ('address' in info ? info.address : undefined)));
26
- const { block } = publishedBlock;
27
- const blockHash = await block.hash().then(hash => hash.toString());
28
- const archiveRoot = block.archive.root.toString();
29
- const slot = block.header.getSlot();
30
- const epoch = getEpochAtSlot(slot, constants);
41
+ const { checkpoint, attestations } = publishedCheckpoint;
42
+ const headerHash = checkpoint.header.hash();
43
+ const archiveRoot = checkpoint.archive.root.toString();
44
+ const slot = checkpoint.header.slotNumber.toBigInt();
45
+ const epoch: EpochNumber = getEpochAtSlot(slot, constants);
31
46
  const { committee, seed } = await epochCache.getCommitteeForEpoch(epoch);
32
- const logData = { blockNumber: block.number, slot, epoch, blockHash, archiveRoot };
47
+ const logData = { checkpointNumber: checkpoint.number, slot, epoch, headerHash, archiveRoot };
33
48
 
34
- logger?.debug(`Validating attestations for block ${block.number} at slot ${slot} in epoch ${epoch}`, {
49
+ logger?.debug(`Validating attestations for checkpoint ${checkpoint.number} at slot ${slot} in epoch ${epoch}`, {
35
50
  committee: (committee ?? []).map(member => member.toString()),
36
51
  recoveredAttestors: attestorInfos,
37
- postedAttestations: publishedBlock.attestations.map(a => (a.address.isZero() ? a.signature : a.address).toString()),
52
+ postedAttestations: attestations.map(a => (a.address.isZero() ? a.signature : a.address).toString()),
38
53
  ...logData,
39
54
  });
40
55
 
41
56
  if (!committee || committee.length === 0) {
42
- logger?.warn(`No committee found for epoch ${epoch} at slot ${slot}. Accepting block without validation.`, logData);
57
+ logger?.warn(
58
+ `No committee found for epoch ${epoch} at slot ${slot}. Accepting checkpoint without validation.`,
59
+ logData,
60
+ );
43
61
  return { valid: true };
44
62
  }
45
63
 
@@ -48,12 +66,12 @@ export async function validateBlockAttestations(
48
66
  const failedValidationResult = <TReason extends ValidateBlockNegativeResult['reason']>(reason: TReason) => ({
49
67
  valid: false as const,
50
68
  reason,
51
- block: publishedBlock.block.toBlockInfo(),
69
+ block: checkpoint.blocks[0].toBlockInfo(),
52
70
  committee,
53
71
  seed,
54
72
  epoch,
55
73
  attestors,
56
- attestations: publishedBlock.attestations,
74
+ attestations,
57
75
  });
58
76
 
59
77
  for (let i = 0; i < attestorInfos.length; i++) {
@@ -90,7 +108,7 @@ export async function validateBlockAttestations(
90
108
 
91
109
  const validAttestationCount = attestorInfos.filter(info => info.status === 'recovered-from-signature').length;
92
110
  if (validAttestationCount < requiredAttestationCount) {
93
- logger?.warn(`Insufficient attestations for block at slot ${slot}`, {
111
+ logger?.warn(`Insufficient attestations for checkpoint at slot ${slot}`, {
94
112
  requiredAttestations: requiredAttestationCount,
95
113
  actualAttestations: validAttestationCount,
96
114
  ...logData,
@@ -98,6 +116,9 @@ export async function validateBlockAttestations(
98
116
  return failedValidationResult('insufficient-attestations');
99
117
  }
100
118
 
101
- logger?.debug(`Block attestations validated successfully for block ${block.number} at slot ${slot}`, logData);
119
+ logger?.debug(
120
+ `Checkpoint attestations validated successfully for checkpoint ${checkpoint.number} at slot ${slot}`,
121
+ logData,
122
+ );
102
123
  return { valid: true };
103
124
  }
package/src/index.ts CHANGED
@@ -2,4 +2,4 @@ export * from './archiver/index.js';
2
2
  export * from './factory.js';
3
3
  export * from './rpc/index.js';
4
4
 
5
- export { retrieveBlocksFromRollup, retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
5
+ export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
@@ -1,5 +1,6 @@
1
1
  import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants';
2
2
  import { DefaultL1ContractsConfig } from '@aztec/ethereum';
3
+ import { EpochNumber } from '@aztec/foundation/branded-types';
3
4
  import { Buffer32 } from '@aztec/foundation/buffer';
4
5
  import { EthAddress } from '@aztec/foundation/eth-address';
5
6
  import { Fr } from '@aztec/foundation/fields';
@@ -182,7 +183,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
182
183
  return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number - 1 : -1)?.getBlockHeader());
183
184
  }
184
185
 
185
- getBlocksForEpoch(epochNumber: bigint): Promise<L2Block[]> {
186
+ getBlocksForEpoch(epochNumber: EpochNumber): Promise<L2Block[]> {
186
187
  const epochDuration = DefaultL1ContractsConfig.aztecEpochDuration;
187
188
  const [start, end] = getSlotRangeForEpoch(epochNumber, { epochDuration });
188
189
  const blocks = this.l2Blocks.filter(b => {
@@ -192,7 +193,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
192
193
  return Promise.resolve(blocks);
193
194
  }
194
195
 
195
- async getBlockHeadersForEpoch(epochNumber: bigint): Promise<BlockHeader[]> {
196
+ async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise<BlockHeader[]> {
196
197
  const blocks = await this.getBlocksForEpoch(epochNumber);
197
198
  return blocks.map(b => b.getBlockHeader());
198
199
  }
@@ -268,7 +269,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
268
269
  };
269
270
  }
270
271
 
271
- getL2EpochNumber(): Promise<bigint> {
272
+ getL2EpochNumber(): Promise<EpochNumber> {
272
273
  throw new Error('Method not implemented.');
273
274
  }
274
275
 
@@ -276,7 +277,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
276
277
  throw new Error('Method not implemented.');
277
278
  }
278
279
 
279
- isEpochComplete(_epochNumber: bigint): Promise<boolean> {
280
+ isEpochComplete(_epochNumber: EpochNumber): Promise<boolean> {
280
281
  throw new Error('Method not implemented.');
281
282
  }
282
283