@aztec/archiver 0.51.0 → 0.52.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/README.md +1 -1
  2. package/dest/archiver/archiver.d.ts +10 -4
  3. package/dest/archiver/archiver.d.ts.map +1 -1
  4. package/dest/archiver/archiver.js +87 -41
  5. package/dest/archiver/archiver_store.d.ts +7 -4
  6. package/dest/archiver/archiver_store.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  8. package/dest/archiver/archiver_store_test_suite.js +46 -34
  9. package/dest/archiver/data_retrieval.d.ts +3 -14
  10. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  11. package/dest/archiver/data_retrieval.js +8 -8
  12. package/dest/archiver/eth_log_handlers.d.ts +8 -6
  13. package/dest/archiver/eth_log_handlers.d.ts.map +1 -1
  14. package/dest/archiver/eth_log_handlers.js +25 -16
  15. package/dest/archiver/index.d.ts +1 -0
  16. package/dest/archiver/index.d.ts.map +1 -1
  17. package/dest/archiver/index.js +1 -1
  18. package/dest/archiver/instrumentation.d.ts +10 -0
  19. package/dest/archiver/instrumentation.d.ts.map +1 -1
  20. package/dest/archiver/instrumentation.js +32 -2
  21. package/dest/archiver/kv_archiver_store/block_body_store.d.ts +1 -1
  22. package/dest/archiver/kv_archiver_store/block_body_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/block_store.d.ts +5 -7
  24. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  25. package/dest/archiver/kv_archiver_store/block_store.js +20 -28
  26. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +5 -4
  27. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  28. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +10 -6
  29. package/dest/archiver/kv_archiver_store/message_store.d.ts +1 -1
  30. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  31. package/dest/archiver/kv_archiver_store/proven_store.d.ts +14 -0
  32. package/dest/archiver/kv_archiver_store/proven_store.d.ts.map +1 -0
  33. package/dest/archiver/kv_archiver_store/proven_store.js +30 -0
  34. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +6 -4
  35. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  36. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +16 -10
  37. package/dest/archiver/structs/data_retrieval.d.ts +27 -0
  38. package/dest/archiver/structs/data_retrieval.d.ts.map +1 -0
  39. package/dest/archiver/structs/data_retrieval.js +2 -0
  40. package/dest/archiver/structs/published.d.ts +11 -0
  41. package/dest/archiver/structs/published.d.ts.map +1 -0
  42. package/dest/archiver/structs/published.js +2 -0
  43. package/dest/index.d.ts.map +1 -1
  44. package/dest/index.js +3 -2
  45. package/package.json +10 -10
  46. package/src/archiver/archiver.ts +115 -53
  47. package/src/archiver/archiver_store.ts +7 -4
  48. package/src/archiver/archiver_store_test_suite.ts +66 -54
  49. package/src/archiver/data_retrieval.ts +13 -25
  50. package/src/archiver/eth_log_handlers.ts +32 -19
  51. package/src/archiver/index.ts +1 -0
  52. package/src/archiver/instrumentation.ts +39 -1
  53. package/src/archiver/kv_archiver_store/block_body_store.ts +1 -1
  54. package/src/archiver/kv_archiver_store/block_store.ts +24 -33
  55. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +12 -7
  56. package/src/archiver/kv_archiver_store/message_store.ts +1 -1
  57. package/src/archiver/kv_archiver_store/proven_store.ts +34 -0
  58. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +24 -15
  59. package/src/archiver/structs/data_retrieval.ts +27 -0
  60. package/src/archiver/structs/published.ts +11 -0
  61. package/src/index.ts +2 -1
@@ -15,7 +15,8 @@ import {
15
15
  } from '@aztec/types/contracts';
16
16
 
17
17
  import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
18
- import { type DataRetrieval } from './data_retrieval.js';
18
+ import { type DataRetrieval } from './structs/data_retrieval.js';
19
+ import { type L1Published } from './structs/published.js';
19
20
 
20
21
  /**
21
22
  * @param testName - The name of the test suite.
@@ -24,24 +25,24 @@ import { type DataRetrieval } from './data_retrieval.js';
24
25
  export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
25
26
  describe(testName, () => {
26
27
  let store: ArchiverDataStore;
27
- let blocks: DataRetrieval<L2Block>;
28
+ let blocks: L1Published<L2Block>[];
28
29
  let blockBodies: DataRetrieval<Body>;
29
- const blockTests: [number, number, () => L2Block[]][] = [
30
- [1, 1, () => blocks.retrievedData.slice(0, 1)],
31
- [10, 1, () => blocks.retrievedData.slice(9, 10)],
32
- [1, 10, () => blocks.retrievedData.slice(0, 10)],
33
- [2, 5, () => blocks.retrievedData.slice(1, 6)],
34
- [5, 2, () => blocks.retrievedData.slice(4, 6)],
30
+ const blockTests: [number, number, () => L1Published<L2Block>[]][] = [
31
+ [1, 1, () => blocks.slice(0, 1)],
32
+ [10, 1, () => blocks.slice(9, 10)],
33
+ [1, 10, () => blocks.slice(0, 10)],
34
+ [2, 5, () => blocks.slice(1, 6)],
35
+ [5, 2, () => blocks.slice(4, 6)],
35
36
  ];
36
37
 
37
38
  beforeEach(() => {
38
39
  store = getStore();
39
- blocks = {
40
- lastProcessedL1BlockNumber: 5n,
41
- retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
42
- };
40
+ blocks = times(10, i => ({
41
+ data: L2Block.random(i + 1),
42
+ l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
43
+ }));
43
44
  blockBodies = {
44
- retrievedData: blocks.retrievedData.map(block => block.body),
45
+ retrievedData: blocks.map(block => block.data.body),
45
46
  lastProcessedL1BlockNumber: 4n,
46
47
  };
47
48
  });
@@ -80,7 +81,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
80
81
  });
81
82
 
82
83
  it('resets `from` to the first block if it is out of range', async () => {
83
- await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.retrievedData.slice(0, 1));
84
+ await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1));
84
85
  });
85
86
  });
86
87
 
@@ -91,7 +92,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
91
92
 
92
93
  it("returns the most recently added block's number", async () => {
93
94
  await store.addBlocks(blocks);
94
- await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.retrievedData.at(-1)!.number);
95
+ await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.data.number);
95
96
  });
96
97
  });
97
98
 
@@ -101,15 +102,17 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
101
102
  blocksSynchedTo: 0n,
102
103
  messagesSynchedTo: 0n,
103
104
  blockBodiesSynchedTo: 0n,
105
+ provenLogsSynchedTo: 0n,
104
106
  } satisfies ArchiverL1SynchPoint);
105
107
  });
106
108
 
107
109
  it('returns the L1 block number in which the most recent L2 block was published', async () => {
108
110
  await store.addBlocks(blocks);
109
111
  await expect(store.getSynchPoint()).resolves.toEqual({
110
- blocksSynchedTo: blocks.lastProcessedL1BlockNumber,
112
+ blocksSynchedTo: 19n,
111
113
  messagesSynchedTo: 0n,
112
114
  blockBodiesSynchedTo: 0n,
115
+ provenLogsSynchedTo: 0n,
113
116
  } satisfies ArchiverL1SynchPoint);
114
117
  });
115
118
 
@@ -119,6 +122,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
119
122
  blocksSynchedTo: 0n,
120
123
  messagesSynchedTo: 0n,
121
124
  blockBodiesSynchedTo: blockBodies.lastProcessedL1BlockNumber,
125
+ provenLogsSynchedTo: 0n,
122
126
  } satisfies ArchiverL1SynchPoint);
123
127
  });
124
128
 
@@ -131,18 +135,30 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
131
135
  blocksSynchedTo: 0n,
132
136
  messagesSynchedTo: 1n,
133
137
  blockBodiesSynchedTo: 0n,
138
+ provenLogsSynchedTo: 0n,
139
+ } satisfies ArchiverL1SynchPoint);
140
+ });
141
+
142
+ it('returns the L1 block number that most recently logged a proven block', async () => {
143
+ await store.setProvenL2BlockNumber({ lastProcessedL1BlockNumber: 3n, retrievedData: 5 });
144
+ await expect(store.getSynchPoint()).resolves.toEqual({
145
+ blocksSynchedTo: 0n,
146
+ messagesSynchedTo: 0n,
147
+ blockBodiesSynchedTo: 0n,
148
+ provenLogsSynchedTo: 3n,
134
149
  } satisfies ArchiverL1SynchPoint);
135
150
  });
136
151
  });
137
152
 
138
153
  describe('addLogs', () => {
139
154
  it('adds encrypted & unencrypted logs', async () => {
155
+ const block = blocks[0].data;
140
156
  await expect(
141
157
  store.addLogs(
142
- blocks.retrievedData[0].body.noteEncryptedLogs,
143
- blocks.retrievedData[0].body.encryptedLogs,
144
- blocks.retrievedData[0].body.unencryptedLogs,
145
- blocks.retrievedData[0].number,
158
+ block.body.noteEncryptedLogs,
159
+ block.body.encryptedLogs,
160
+ block.body.unencryptedLogs,
161
+ block.number,
146
162
  ),
147
163
  ).resolves.toEqual(true);
148
164
  });
@@ -155,12 +171,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
155
171
  ])('getLogs (%s)', (_, logType) => {
156
172
  beforeEach(async () => {
157
173
  await Promise.all(
158
- blocks.retrievedData.map(block =>
174
+ blocks.map(block =>
159
175
  store.addLogs(
160
- block.body.noteEncryptedLogs,
161
- block.body.encryptedLogs,
162
- block.body.unencryptedLogs,
163
- block.number,
176
+ block.data.body.noteEncryptedLogs,
177
+ block.data.body.encryptedLogs,
178
+ block.data.body.unencryptedLogs,
179
+ block.data.number,
164
180
  ),
165
181
  ),
166
182
  );
@@ -170,12 +186,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
170
186
  const expectedLogs = getExpectedBlocks().map(block => {
171
187
  switch (logType) {
172
188
  case LogType.ENCRYPTED:
173
- return block.body.encryptedLogs;
189
+ return block.data.body.encryptedLogs;
174
190
  case LogType.NOTEENCRYPTED:
175
- return block.body.noteEncryptedLogs;
191
+ return block.data.body.noteEncryptedLogs;
176
192
  case LogType.UNENCRYPTED:
177
193
  default:
178
- return block.body.unencryptedLogs;
194
+ return block.data.body.unencryptedLogs;
179
195
  }
180
196
  });
181
197
  const actualLogs = await store.getLogs(from, limit, logType);
@@ -186,12 +202,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
186
202
  describe('getTxEffect', () => {
187
203
  beforeEach(async () => {
188
204
  await Promise.all(
189
- blocks.retrievedData.map(block =>
205
+ blocks.map(block =>
190
206
  store.addLogs(
191
- block.body.noteEncryptedLogs,
192
- block.body.encryptedLogs,
193
- block.body.unencryptedLogs,
194
- block.number,
207
+ block.data.body.noteEncryptedLogs,
208
+ block.data.body.encryptedLogs,
209
+ block.data.body.unencryptedLogs,
210
+ block.data.number,
195
211
  ),
196
212
  ),
197
213
  );
@@ -200,11 +216,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
200
216
  });
201
217
 
202
218
  it.each([
203
- () => blocks.retrievedData[0].body.txEffects[0],
204
- () => blocks.retrievedData[9].body.txEffects[3],
205
- () => blocks.retrievedData[3].body.txEffects[1],
206
- () => blocks.retrievedData[5].body.txEffects[2],
207
- () => blocks.retrievedData[1].body.txEffects[0],
219
+ () => blocks[0].data.body.txEffects[0],
220
+ () => blocks[9].data.body.txEffects[3],
221
+ () => blocks[3].data.body.txEffects[1],
222
+ () => blocks[5].data.body.txEffects[2],
223
+ () => blocks[1].data.body.txEffects[0],
208
224
  ])('retrieves a previously stored transaction', async getExpectedTx => {
209
225
  const expectedTx = getExpectedTx();
210
226
  const actualTx = await store.getTxEffect(expectedTx.txHash);
@@ -339,28 +355,24 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
339
355
  const numPublicFunctionCalls = 3;
340
356
  const numUnencryptedLogs = 2;
341
357
  const numBlocks = 10;
342
- let blocks: DataRetrieval<L2Block>;
358
+ let blocks: L1Published<L2Block>[];
343
359
 
344
360
  beforeEach(async () => {
345
- blocks = {
346
- lastProcessedL1BlockNumber: 4n,
347
- retrievedData: Array(numBlocks)
348
- .fill(0)
349
- .map((_, index: number) =>
350
- L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
351
- ),
352
- };
361
+ blocks = times(numBlocks, (index: number) => ({
362
+ data: L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
363
+ l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
364
+ }));
353
365
 
354
366
  await store.addBlocks(blocks);
355
367
  await store.addBlockBodies(blockBodies);
356
368
 
357
369
  await Promise.all(
358
- blocks.retrievedData.map(block =>
370
+ blocks.map(block =>
359
371
  store.addLogs(
360
- block.body.noteEncryptedLogs,
361
- block.body.encryptedLogs,
362
- block.body.unencryptedLogs,
363
- block.number,
372
+ block.data.body.noteEncryptedLogs,
373
+ block.data.body.encryptedLogs,
374
+ block.data.body.unencryptedLogs,
375
+ block.data.number,
364
376
  ),
365
377
  ),
366
378
  );
@@ -370,7 +382,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
370
382
  // get random tx
371
383
  const targetBlockIndex = randomInt(numBlocks);
372
384
  const targetTxIndex = randomInt(txsPerBlock);
373
- const targetTxHash = blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].txHash;
385
+ const targetTxHash = blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash;
374
386
 
375
387
  const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
376
388
  const logs = response.logs;
@@ -414,7 +426,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
414
426
  const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
415
427
  const targetLogIndex = randomInt(numUnencryptedLogs);
416
428
  const targetContractAddress =
417
- blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
429
+ blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
418
430
  targetFunctionLogIndex
419
431
  ].logs[targetLogIndex].contractAddress;
420
432
 
@@ -7,27 +7,15 @@ import { RollupAbi } from '@aztec/l1-artifacts';
7
7
  import { type PublicClient, getAbiItem } from 'viem';
8
8
 
9
9
  import {
10
- getL2BlockProcessedLogs,
10
+ getL2BlockProposedLogs,
11
11
  getMessageSentLogs,
12
12
  getTxsPublishedLogs,
13
- processL2BlockProcessedLogs,
13
+ processL2BlockProposedLogs,
14
14
  processMessageSentLogs,
15
15
  processTxsPublishedLogs,
16
16
  } from './eth_log_handlers.js';
17
-
18
- /**
19
- * Data retrieved from logs
20
- */
21
- export type DataRetrieval<T> = {
22
- /**
23
- * Blocknumber of the last L1 block from which we obtained data.
24
- */
25
- lastProcessedL1BlockNumber: bigint;
26
- /**
27
- * The data returned.
28
- */
29
- retrievedData: T[];
30
- };
17
+ import { type DataRetrieval } from './structs/data_retrieval.js';
18
+ import { type L1PublishedData } from './structs/published.js';
31
19
 
32
20
  /**
33
21
  * Fetches new L2 block metadata (header, archive snapshot).
@@ -47,37 +35,37 @@ export async function retrieveBlockMetadataFromRollup(
47
35
  searchEndBlock: bigint,
48
36
  expectedNextL2BlockNum: bigint,
49
37
  logger: DebugLogger = createDebugLogger('aztec:archiver'),
50
- ): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
51
- const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
38
+ ): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
39
+ const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
52
40
  do {
53
41
  if (searchStartBlock > searchEndBlock) {
54
42
  break;
55
43
  }
56
- const l2BlockProcessedLogs = await getL2BlockProcessedLogs(
44
+ const L2BlockProposedLogs = await getL2BlockProposedLogs(
57
45
  publicClient,
58
46
  rollupAddress,
59
47
  searchStartBlock,
60
48
  searchEndBlock,
61
49
  );
62
- if (l2BlockProcessedLogs.length === 0) {
50
+ if (L2BlockProposedLogs.length === 0) {
63
51
  break;
64
52
  }
65
53
 
66
- const lastLog = l2BlockProcessedLogs[l2BlockProcessedLogs.length - 1];
54
+ const lastLog = L2BlockProposedLogs[L2BlockProposedLogs.length - 1];
67
55
  logger.debug(
68
- `Got L2 block processed logs for ${l2BlockProcessedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`,
56
+ `Got L2 block processed logs for ${L2BlockProposedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`,
69
57
  );
70
58
 
71
- const newBlockMetadata = await processL2BlockProcessedLogs(
59
+ const newBlockMetadata = await processL2BlockProposedLogs(
72
60
  publicClient,
73
61
  expectedNextL2BlockNum,
74
- l2BlockProcessedLogs,
62
+ L2BlockProposedLogs,
75
63
  );
76
64
  retrievedBlockMetadata.push(...newBlockMetadata);
77
65
  searchStartBlock = lastLog.blockNumber! + 1n;
78
66
  expectedNextL2BlockNum += BigInt(newBlockMetadata.length);
79
67
  } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
80
- return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedBlockMetadata };
68
+ return retrievedBlockMetadata;
81
69
  }
82
70
 
83
71
  /**
@@ -16,6 +16,8 @@ import {
16
16
  slice,
17
17
  } from 'viem';
18
18
 
19
+ import { type L1PublishedData } from './structs/published.js';
20
+
19
21
  /**
20
22
  * Processes newly received MessageSent (L1 to L2) logs.
21
23
  * @param logs - MessageSent logs.
@@ -33,18 +35,18 @@ export function processMessageSentLogs(
33
35
  }
34
36
 
35
37
  /**
36
- * Processes newly received L2BlockProcessed logs.
38
+ * Processes newly received L2BlockProposed logs.
37
39
  * @param publicClient - The viem public client to use for transaction retrieval.
38
40
  * @param expectedL2BlockNumber - The next expected L2 block number.
39
- * @param logs - L2BlockProcessed logs.
41
+ * @param logs - L2BlockProposed logs.
40
42
  * @returns - An array of tuples representing block metadata including the header, archive tree snapshot.
41
43
  */
42
- export async function processL2BlockProcessedLogs(
44
+ export async function processL2BlockProposedLogs(
43
45
  publicClient: PublicClient,
44
46
  expectedL2BlockNumber: bigint,
45
- logs: Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProcessed'>[],
46
- ): Promise<[Header, AppendOnlyTreeSnapshot][]> {
47
- const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
47
+ logs: Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProposed'>[],
48
+ ): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
49
+ const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
48
50
  for (const log of logs) {
49
51
  const blockNum = log.args.blockNumber;
50
52
  if (blockNum !== expectedL2BlockNumber) {
@@ -57,13 +59,24 @@ export async function processL2BlockProcessedLogs(
57
59
  log.args.blockNumber,
58
60
  );
59
61
 
60
- retrievedBlockMetadata.push([header, archive]);
62
+ const l1: L1PublishedData = {
63
+ blockNumber: log.blockNumber,
64
+ blockHash: log.blockHash,
65
+ timestamp: await getL1BlockTime(publicClient, log.blockNumber),
66
+ };
67
+
68
+ retrievedBlockMetadata.push([header, archive, l1]);
61
69
  expectedL2BlockNumber++;
62
70
  }
63
71
 
64
72
  return retrievedBlockMetadata;
65
73
  }
66
74
 
75
+ export async function getL1BlockTime(publicClient: PublicClient, blockNumber: bigint): Promise<bigint> {
76
+ const block = await publicClient.getBlock({ blockNumber, includeTransactions: false });
77
+ return block.timestamp;
78
+ }
79
+
67
80
  export async function processTxsPublishedLogs(
68
81
  publicClient: PublicClient,
69
82
  logs: Log<bigint, number, false, undefined, true, typeof AvailabilityOracleAbi, 'TxsPublished'>[],
@@ -97,7 +110,7 @@ async function getBlockMetadataFromRollupTx(
97
110
  data,
98
111
  });
99
112
 
100
- if (!(functionName === 'process' || functionName === 'publishAndProcess')) {
113
+ if (!(functionName === 'propose')) {
101
114
  throw new Error(`Unexpected method called ${functionName}`);
102
115
  }
103
116
  const [headerHex, archiveRootHex, _] = args! as readonly [Hex, Hex, Hex];
@@ -122,7 +135,7 @@ async function getBlockMetadataFromRollupTx(
122
135
 
123
136
  /**
124
137
  * Gets block bodies from calldata of an L1 transaction, and deserializes them into Body objects.
125
- * @note Assumes that the block was published using `publishAndProcess` or `publish`.
138
+ * @note Assumes that the block was published using `propose` or `publish`.
126
139
  * TODO: Add retries and error management.
127
140
  * @param publicClient - The viem public client to use for transaction retrieval.
128
141
  * @param txHash - Hash of the tx that published it.
@@ -133,16 +146,16 @@ async function getBlockBodiesFromAvailabilityOracleTx(
133
146
  txHash: `0x${string}`,
134
147
  ): Promise<Body> {
135
148
  const { input: data } = await publicClient.getTransaction({ hash: txHash });
136
- const DATA_INDEX = [4, 3, 0];
137
149
 
138
150
  // @note Use `forge inspect Rollup methodIdentifiers to get this,
139
151
  // If using `forge sig` you will get an INVALID value for the case with a struct.
140
152
  // [
141
- // "publishAndProcess(bytes calldata _header,bytes32 _archive,bytes32 _blockHash,SignatureLib.Signature[] memory _signatures,bytes calldata _body)",
142
- // "publishAndProcess(bytes calldata _header,bytes32 _archive,bytes32 _blockHash,bytes calldata _body)",
153
+ // "propose(bytes,bytes32,bytes32,(bool,uint8,bytes32,bytes32)[],bytes)": "08978fe9",
154
+ // "propose(bytes,bytes32,bytes32,bytes)": "81e6f472",
143
155
  // "publish(bytes calldata _body)"
144
156
  // ]
145
- const SUPPORTED_SIGS = ['0x64450c6c', '0xde36c478', '0x7fd28346'];
157
+ const DATA_INDEX = [4, 3, 0];
158
+ const SUPPORTED_SIGS = ['0x08978fe9', '0x81e6f472', '0x7fd28346'];
146
159
 
147
160
  const signature = slice(data, 0, 4);
148
161
 
@@ -150,7 +163,7 @@ async function getBlockBodiesFromAvailabilityOracleTx(
150
163
  throw new Error(`Unexpected method called ${signature}`);
151
164
  }
152
165
 
153
- if (signature === SUPPORTED_SIGS[2]) {
166
+ if (signature === SUPPORTED_SIGS[SUPPORTED_SIGS.length - 1]) {
154
167
  const { args } = decodeFunctionData({
155
168
  abi: AvailabilityOracleAbi,
156
169
  data,
@@ -171,24 +184,24 @@ async function getBlockBodiesFromAvailabilityOracleTx(
171
184
  }
172
185
 
173
186
  /**
174
- * Gets relevant `L2BlockProcessed` logs from chain.
187
+ * Gets relevant `L2BlockProposed` logs from chain.
175
188
  * @param publicClient - The viem public client to use for transaction retrieval.
176
189
  * @param rollupAddress - The address of the rollup contract.
177
190
  * @param fromBlock - First block to get logs from (inclusive).
178
191
  * @param toBlock - Last block to get logs from (inclusive).
179
- * @returns An array of `L2BlockProcessed` logs.
192
+ * @returns An array of `L2BlockProposed` logs.
180
193
  */
181
- export function getL2BlockProcessedLogs(
194
+ export function getL2BlockProposedLogs(
182
195
  publicClient: PublicClient,
183
196
  rollupAddress: EthAddress,
184
197
  fromBlock: bigint,
185
198
  toBlock: bigint,
186
- ): Promise<Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProcessed'>[]> {
199
+ ): Promise<Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProposed'>[]> {
187
200
  return publicClient.getLogs({
188
201
  address: getAddress(rollupAddress.toString()),
189
202
  event: getAbiItem({
190
203
  abi: RollupAbi,
191
- name: 'L2BlockProcessed',
204
+ name: 'L2BlockProposed',
192
205
  }),
193
206
  fromBlock,
194
207
  toBlock: toBlock + 1n, // the toBlock argument in getLogs is exclusive
@@ -1,5 +1,6 @@
1
1
  export * from './archiver.js';
2
2
  export * from './config.js';
3
+ export { type L1Published, type L1PublishedData } from './structs/published.js';
3
4
  export { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js';
4
5
  export { ArchiverDataStore } from './archiver_store.js';
5
6
  export { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js';
@@ -1,20 +1,27 @@
1
1
  import { type L2Block } from '@aztec/circuit-types';
2
+ import { createDebugLogger } from '@aztec/foundation/log';
2
3
  import {
3
4
  Attributes,
4
5
  type Gauge,
5
6
  type Histogram,
6
7
  Metrics,
7
8
  type TelemetryClient,
9
+ type UpDownCounter,
8
10
  ValueType,
9
11
  exponentialBuckets,
12
+ millisecondBuckets,
10
13
  } from '@aztec/telemetry-client';
11
14
 
12
15
  export class ArchiverInstrumentation {
13
16
  private blockHeight: Gauge;
14
17
  private blockSize: Gauge;
15
18
  private syncDuration: Histogram;
19
+ private proofsSubmittedDelay: Histogram;
20
+ private proofsSubmittedCount: UpDownCounter;
16
21
 
17
- constructor(telemetry: TelemetryClient) {
22
+ private log = createDebugLogger('aztec:archiver:instrumentation');
23
+
24
+ constructor(private telemetry: TelemetryClient) {
18
25
  const meter = telemetry.getMeter('Archiver');
19
26
  this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, {
20
27
  description: 'The height of the latest block processed by the archiver',
@@ -34,6 +41,24 @@ export class ArchiverInstrumentation {
34
41
  explicitBucketBoundaries: exponentialBuckets(1, 16),
35
42
  },
36
43
  });
44
+
45
+ this.proofsSubmittedCount = meter.createUpDownCounter(Metrics.ARCHIVER_ROLLUP_PROOF_COUNT, {
46
+ description: 'Number of proofs submitted',
47
+ valueType: ValueType.INT,
48
+ });
49
+
50
+ this.proofsSubmittedDelay = meter.createHistogram(Metrics.ARCHIVER_ROLLUP_PROOF_DELAY, {
51
+ unit: 'ms',
52
+ description: 'Time after a block is submitted until its proof is published',
53
+ valueType: ValueType.INT,
54
+ advice: {
55
+ explicitBucketBoundaries: millisecondBuckets(1, 80), // 10ms -> ~3hs
56
+ },
57
+ });
58
+ }
59
+
60
+ public isEnabled(): boolean {
61
+ return this.telemetry.isEnabled();
37
62
  }
38
63
 
39
64
  public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) {
@@ -47,4 +72,17 @@ export class ArchiverInstrumentation {
47
72
  public updateLastProvenBlock(blockNumber: number) {
48
73
  this.blockHeight.record(blockNumber, { [Attributes.STATUS]: 'proven' });
49
74
  }
75
+
76
+ public processProofsVerified(logs: { proverId: string; l2BlockNumber: bigint; delay: bigint }[]) {
77
+ for (const log of logs) {
78
+ this.log.debug('Recording proof verified event', log);
79
+ this.proofsSubmittedCount.add(1, {
80
+ [Attributes.ROLLUP_PROVER_ID]: log.proverId,
81
+ [Attributes.PROOF_TIMED_OUT]: log.delay > 20n * 60n * 1000n,
82
+ });
83
+ this.proofsSubmittedDelay.record(Math.ceil(Number(log.delay)), {
84
+ [Attributes.ROLLUP_PROVER_ID]: log.proverId,
85
+ });
86
+ }
87
+ }
50
88
  }
@@ -2,7 +2,7 @@ import { Body } from '@aztec/circuit-types';
2
2
  import { createDebugLogger } from '@aztec/foundation/log';
3
3
  import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
4
4
 
5
- import { type DataRetrieval } from '../data_retrieval.js';
5
+ import { type DataRetrieval } from '../structs/data_retrieval.js';
6
6
 
7
7
  export class BlockBodyStore {
8
8
  /** Map block body hash to block body */