@aztec/archiver 0.65.2 → 0.66.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dest/archiver/archiver.d.ts +18 -22
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +143 -99
  4. package/dest/archiver/archiver_store.d.ts +7 -8
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +126 -150
  8. package/dest/archiver/config.d.ts +6 -12
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +6 -1
  11. package/dest/archiver/data_retrieval.d.ts +2 -3
  12. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  13. package/dest/archiver/data_retrieval.js +14 -15
  14. package/dest/archiver/instrumentation.d.ts +2 -7
  15. package/dest/archiver/instrumentation.d.ts.map +1 -1
  16. package/dest/archiver/instrumentation.js +3 -6
  17. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +7 -8
  18. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +7 -8
  20. package/dest/archiver/kv_archiver_store/log_store.d.ts +7 -8
  21. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  22. package/dest/archiver/kv_archiver_store/log_store.js +55 -95
  23. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +8 -10
  24. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  25. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +50 -57
  26. package/dest/index.d.ts +2 -2
  27. package/dest/index.d.ts.map +1 -1
  28. package/dest/index.js +3 -42
  29. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  30. package/dest/test/mock_l2_block_source.js +2 -2
  31. package/package.json +11 -13
  32. package/src/archiver/archiver.ts +199 -191
  33. package/src/archiver/archiver_store.ts +6 -13
  34. package/src/archiver/archiver_store_test_suite.ts +160 -186
  35. package/src/archiver/config.ts +12 -12
  36. package/src/archiver/data_retrieval.ts +12 -17
  37. package/src/archiver/instrumentation.ts +3 -5
  38. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +7 -14
  39. package/src/archiver/kv_archiver_store/log_store.ts +68 -118
  40. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +51 -65
  41. package/src/index.ts +5 -59
  42. package/src/test/mock_l2_block_source.ts +1 -2
  43. package/dest/archiver/epoch_helpers.d.ts +0 -20
  44. package/dest/archiver/epoch_helpers.d.ts.map +0 -1
  45. package/dest/archiver/epoch_helpers.js +0 -34
  46. package/src/archiver/epoch_helpers.ts +0 -54
@@ -1,12 +1,9 @@
1
1
  import {
2
- type FromLogType,
3
2
  type GetUnencryptedLogsResponse,
4
3
  type InBlock,
5
4
  type InboxLeaf,
6
5
  type L2Block,
7
- type L2BlockL2Logs,
8
6
  type LogFilter,
9
- type LogType,
10
7
  type TxEffect,
11
8
  type TxHash,
12
9
  type TxReceipt,
@@ -18,6 +15,7 @@ import {
18
15
  type ExecutablePrivateFunctionWithMembershipProof,
19
16
  type Fr,
20
17
  type Header,
18
+ type PrivateLog,
21
19
  type UnconstrainedFunctionWithMembershipProof,
22
20
  } from '@aztec/circuits.js';
23
21
  import { type ContractArtifact, type FunctionSelector } from '@aztec/foundation/abi';
@@ -142,17 +140,12 @@ export interface ArchiverDataStore {
142
140
  getTotalL1ToL2MessageCount(): Promise<bigint>;
143
141
 
144
142
  /**
145
- * Gets up to `limit` amount of logs starting from `from`.
146
- * @param from - Number of the L2 block to which corresponds the first logs to be returned.
147
- * @param limit - The number of logs to return.
148
- * @param logType - Specifies whether to return encrypted or unencrypted logs.
149
- * @returns The requested logs.
143
+ * Retrieves all private logs from up to `limit` blocks, starting from the block number `from`.
144
+ * @param from - The block number from which to begin retrieving logs.
145
+ * @param limit - The maximum number of blocks to retrieve logs from.
146
+ * @returns An array of private logs from the specified range of blocks.
150
147
  */
151
- getLogs<TLogType extends LogType>(
152
- from: number,
153
- limit: number,
154
- logType: TLogType,
155
- ): Promise<L2BlockL2Logs<FromLogType<TLogType>>[]>;
148
+ getPrivateLogs(from: number, limit: number): Promise<PrivateLog[]>;
156
149
 
157
150
  /**
158
151
  * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
@@ -1,4 +1,14 @@
1
- import { InboxLeaf, L2Block, LogId, LogType, TxHash, wrapInBlock } from '@aztec/circuit-types';
1
+ import {
2
+ InboxLeaf,
3
+ L2Block,
4
+ LogId,
5
+ TxEffect,
6
+ TxHash,
7
+ UnencryptedFunctionL2Logs,
8
+ UnencryptedL2Log,
9
+ UnencryptedTxL2Logs,
10
+ wrapInBlock,
11
+ } from '@aztec/circuit-types';
2
12
  import '@aztec/circuit-types/jest';
3
13
  import {
4
14
  AztecAddress,
@@ -8,6 +18,8 @@ import {
8
18
  INITIAL_L2_BLOCK_NUM,
9
19
  L1_TO_L2_MSG_SUBTREE_HEIGHT,
10
20
  MAX_NULLIFIERS_PER_TX,
21
+ PRIVATE_LOG_SIZE_IN_FIELDS,
22
+ PrivateLog,
11
23
  SerializableContractInstance,
12
24
  computePublicBytecodeCommitment,
13
25
  } from '@aztec/circuits.js';
@@ -16,7 +28,6 @@ import {
16
28
  makeExecutablePrivateFunctionWithMembershipProof,
17
29
  makeUnconstrainedFunctionWithMembershipProof,
18
30
  } from '@aztec/circuits.js/testing';
19
- import { toBufferBE } from '@aztec/foundation/bigint-buffer';
20
31
  import { times } from '@aztec/foundation/collection';
21
32
  import { randomBytes, randomInt } from '@aztec/foundation/crypto';
22
33
 
@@ -155,55 +166,41 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
155
166
  });
156
167
 
157
168
  describe('addLogs', () => {
158
- it('adds encrypted & unencrypted logs', async () => {
169
+ it('adds private & unencrypted logs', async () => {
159
170
  const block = blocks[0].data;
160
171
  await expect(store.addLogs([block])).resolves.toEqual(true);
161
172
  });
162
173
  });
163
174
 
164
175
  describe('deleteLogs', () => {
165
- it('deletes encrypted & unencrypted logs', async () => {
176
+ it('deletes private & unencrypted logs', async () => {
166
177
  const block = blocks[0].data;
167
178
  await store.addBlocks([blocks[0]]);
168
179
  await expect(store.addLogs([block])).resolves.toEqual(true);
169
180
 
170
- expect((await store.getLogs(1, 1, LogType.NOTEENCRYPTED))[0]).toEqual(block.body.noteEncryptedLogs);
171
- expect((await store.getLogs(1, 1, LogType.ENCRYPTED))[0]).toEqual(block.body.encryptedLogs);
172
- expect((await store.getLogs(1, 1, LogType.UNENCRYPTED))[0]).toEqual(block.body.unencryptedLogs);
181
+ expect((await store.getPrivateLogs(1, 1)).length).toEqual(
182
+ block.body.txEffects.map(txEffect => txEffect.privateLogs).flat().length,
183
+ );
184
+ expect((await store.getUnencryptedLogs({ fromBlock: 1 })).logs.length).toEqual(
185
+ block.body.unencryptedLogs.getTotalLogCount(),
186
+ );
173
187
 
174
188
  // This one is a pain for memory as we would never want to just delete memory in the middle.
175
189
  await store.deleteLogs([block]);
176
190
 
177
- expect((await store.getLogs(1, 1, LogType.NOTEENCRYPTED))[0]).toEqual(undefined);
178
- expect((await store.getLogs(1, 1, LogType.ENCRYPTED))[0]).toEqual(undefined);
179
- expect((await store.getLogs(1, 1, LogType.UNENCRYPTED))[0]).toEqual(undefined);
191
+ expect((await store.getPrivateLogs(1, 1)).length).toEqual(0);
192
+ expect((await store.getUnencryptedLogs({ fromBlock: 1 })).logs.length).toEqual(0);
180
193
  });
181
194
  });
182
195
 
183
- describe.each([
184
- ['note_encrypted', LogType.NOTEENCRYPTED],
185
- ['encrypted', LogType.ENCRYPTED],
186
- ['unencrypted', LogType.UNENCRYPTED],
187
- ])('getLogs (%s)', (_, logType) => {
188
- beforeEach(async () => {
189
- await store.addBlocks(blocks);
190
- await store.addLogs(blocks.map(b => b.data));
191
- });
196
+ describe('getPrivateLogs', () => {
197
+ it('gets added private logs', async () => {
198
+ const block = blocks[0].data;
199
+ await store.addBlocks([blocks[0]]);
200
+ await store.addLogs([block]);
192
201
 
193
- it.each(blockTests)('retrieves previously stored logs', async (from, limit, getExpectedBlocks) => {
194
- const expectedLogs = getExpectedBlocks().map(block => {
195
- switch (logType) {
196
- case LogType.ENCRYPTED:
197
- return block.data.body.encryptedLogs;
198
- case LogType.NOTEENCRYPTED:
199
- return block.data.body.noteEncryptedLogs;
200
- case LogType.UNENCRYPTED:
201
- default:
202
- return block.data.body.unencryptedLogs;
203
- }
204
- });
205
- const actualLogs = await store.getLogs(from, limit, logType);
206
- expect(actualLogs[0].txLogs[0]).toEqual(expectedLogs[0].txLogs[0]);
202
+ const privateLogs = await store.getPrivateLogs(1, 1);
203
+ expect(privateLogs).toEqual(block.body.txEffects.map(txEffect => txEffect.privateLogs).flat());
207
204
  });
208
205
  });
209
206
 
@@ -373,178 +370,155 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
373
370
  });
374
371
 
375
372
  describe('getLogsByTags', () => {
376
- const txsPerBlock = 4;
377
- const numPrivateFunctionCalls = 3;
378
- const numPublicFunctionCalls = 1;
379
- const numEncryptedLogsPerFn = 2;
380
- const numUnencryptedLogsPerFn = 1;
381
- const numBlocks = 10;
382
- let blocks: L1Published<L2Block>[];
383
- let encryptedLogTags: { [i: number]: { [j: number]: Buffer[] } } = {};
384
- let unencryptedLogTags: { [i: number]: { [j: number]: Buffer[] } } = {};
385
-
386
- beforeEach(async () => {
387
- blocks = times(numBlocks, (index: number) => ({
388
- data: L2Block.random(
389
- index + 1,
390
- txsPerBlock,
391
- numPrivateFunctionCalls,
392
- numPublicFunctionCalls,
393
- numEncryptedLogsPerFn,
394
- numUnencryptedLogsPerFn,
395
- ),
396
- l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
397
- }));
398
- // Last block has the note encrypted log tags of the first tx copied from the previous block
399
- blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[0].functionLogs.forEach((fnLogs, fnIndex) => {
400
- fnLogs.logs.forEach((log, logIndex) => {
401
- const previousLogData =
402
- blocks[numBlocks - 2].data.body.noteEncryptedLogs.txLogs[0].functionLogs[fnIndex].logs[logIndex].data;
403
- previousLogData.copy(log.data, 0, 0, 32);
404
- });
405
- });
406
- // Last block has invalid tags in the second tx
407
- const tooBig = toBufferBE(Fr.MODULUS, 32);
408
- blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[1].functionLogs.forEach(fnLogs => {
409
- fnLogs.logs.forEach(log => {
410
- tooBig.copy(log.data, 0, 0, 32);
411
- });
412
- });
373
+ const numBlocks = 3;
374
+ const numTxsPerBlock = 4;
375
+ const numPrivateLogsPerTx = 3;
376
+ const numUnencryptedLogsPerTx = 2;
413
377
 
414
- await store.addBlocks(blocks);
415
- await store.addLogs(blocks.map(b => b.data));
378
+ let blocks: L1Published<L2Block>[];
416
379
 
417
- encryptedLogTags = {};
418
- unencryptedLogTags = {};
419
- blocks.forEach((b, blockIndex) => {
420
- if (!encryptedLogTags[blockIndex]) {
421
- encryptedLogTags[blockIndex] = {};
422
- }
423
- if (!unencryptedLogTags[blockIndex]) {
424
- unencryptedLogTags[blockIndex] = {};
425
- }
426
- b.data.body.noteEncryptedLogs.txLogs.forEach((txLogs, txIndex) => {
427
- if (!encryptedLogTags[blockIndex][txIndex]) {
428
- encryptedLogTags[blockIndex][txIndex] = [];
429
- }
430
- encryptedLogTags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32)));
431
- });
432
- b.data.body.unencryptedLogs.txLogs.forEach((txLogs, txIndex) => {
433
- if (!unencryptedLogTags[blockIndex][txIndex]) {
434
- unencryptedLogTags[blockIndex][txIndex] = [];
435
- }
436
- unencryptedLogTags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32)));
437
- });
438
- });
439
- });
380
+ const makeTag = (blockNumber: number, txIndex: number, logIndex: number, isPublic = false) =>
381
+ new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1));
440
382
 
441
- it('is possible to batch request encrypted logs of a tx via tags', async () => {
442
- // get random tx from any block that's not the last one
443
- const targetBlockIndex = randomInt(numBlocks - 2);
444
- const targetTxIndex = randomInt(txsPerBlock);
383
+ const makePrivateLog = (tag: Fr) =>
384
+ PrivateLog.fromFields([tag, ...times(PRIVATE_LOG_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i))]);
445
385
 
446
- const logsByTags = await store.getLogsByTags(
447
- encryptedLogTags[targetBlockIndex][targetTxIndex].map(buffer => new Fr(buffer)),
448
- );
386
+ const makePublicLog = (tag: Fr) =>
387
+ Buffer.concat([tag.toBuffer(), ...times(tag.toNumber() % 60, i => new Fr(tag.toNumber() + i).toBuffer())]);
449
388
 
450
- const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn;
451
- expect(logsByTags.length).toEqual(expectedResponseSize);
452
-
453
- logsByTags.forEach((logsByTag, logIndex) => {
454
- expect(logsByTag).toHaveLength(1);
455
- const [scopedLog] = logsByTag;
456
- expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash);
457
- expect(scopedLog.logData).toEqual(
458
- blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data,
459
- );
389
+ const mockPrivateLogs = (blockNumber: number, txIndex: number) => {
390
+ return times(numPrivateLogsPerTx, (logIndex: number) => {
391
+ const tag = makeTag(blockNumber, txIndex, logIndex);
392
+ return makePrivateLog(tag);
460
393
  });
461
- });
462
-
463
- // TODO: Allow this test when #9835 is fixed and tags can be correctly decoded
464
- it.skip('is possible to batch request all logs (encrypted and unencrypted) of a tx via tags', async () => {
465
- // get random tx from any block that's not the last one
466
- const targetBlockIndex = randomInt(numBlocks - 2);
467
- const targetTxIndex = randomInt(txsPerBlock);
468
-
469
- const logsByTags = await store.getLogsByTags(
470
- encryptedLogTags[targetBlockIndex][targetTxIndex]
471
- .concat(unencryptedLogTags[targetBlockIndex][targetTxIndex])
472
- .map(buffer => new Fr(buffer)),
473
- );
394
+ };
474
395
 
475
- const expectedResponseSize =
476
- numPrivateFunctionCalls * numEncryptedLogsPerFn + numPublicFunctionCalls * numUnencryptedLogsPerFn;
477
- expect(logsByTags.length).toEqual(expectedResponseSize);
478
-
479
- const encryptedLogsByTags = logsByTags.slice(0, numPrivateFunctionCalls * numEncryptedLogsPerFn);
480
- const unencryptedLogsByTags = logsByTags.slice(numPrivateFunctionCalls * numEncryptedLogsPerFn);
481
- encryptedLogsByTags.forEach((logsByTag, logIndex) => {
482
- expect(logsByTag).toHaveLength(1);
483
- const [scopedLog] = logsByTag;
484
- expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash);
485
- expect(scopedLog.logData).toEqual(
486
- blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data,
487
- );
396
+ const mockUnencryptedLogs = (blockNumber: number, txIndex: number) => {
397
+ const logs = times(numUnencryptedLogsPerTx, (logIndex: number) => {
398
+ const tag = makeTag(blockNumber, txIndex, logIndex, /* isPublic */ true);
399
+ const log = makePublicLog(tag);
400
+ return new UnencryptedL2Log(AztecAddress.fromNumber(txIndex), log);
488
401
  });
489
- unencryptedLogsByTags.forEach((logsByTag, logIndex) => {
490
- expect(logsByTag).toHaveLength(1);
491
- const [scopedLog] = logsByTag;
492
- expect(scopedLog.logData).toEqual(
493
- blocks[targetBlockIndex].data.body.unencryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data,
494
- );
402
+ return new UnencryptedTxL2Logs([new UnencryptedFunctionL2Logs(logs)]);
403
+ };
404
+
405
+ const mockBlockWithLogs = (blockNumber: number): L1Published<L2Block> => {
406
+ const block = L2Block.random(blockNumber);
407
+ block.header.globalVariables.blockNumber = new Fr(blockNumber);
408
+
409
+ block.body.txEffects = times(numTxsPerBlock, (txIndex: number) => {
410
+ const txEffect = TxEffect.random();
411
+ txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
412
+ txEffect.unencryptedLogs = mockUnencryptedLogs(blockNumber, txIndex);
413
+ return txEffect;
495
414
  });
496
- });
497
415
 
498
- it('is possible to batch request logs of different blocks via tags', async () => {
499
- // get first tx of first block and second tx of second block
500
- const logsByTags = await store.getLogsByTags(
501
- [...encryptedLogTags[0][0], ...encryptedLogTags[1][1]].map(buffer => new Fr(buffer)),
502
- );
416
+ return {
417
+ data: block,
418
+ l1: { blockNumber: BigInt(blockNumber), blockHash: `0x${blockNumber}`, timestamp: BigInt(blockNumber) },
419
+ };
420
+ };
503
421
 
504
- const expectedResponseSize = 2 * numPrivateFunctionCalls * numEncryptedLogsPerFn;
505
- expect(logsByTags.length).toEqual(expectedResponseSize);
422
+ beforeEach(async () => {
423
+ blocks = times(numBlocks, (index: number) => mockBlockWithLogs(index));
506
424
 
507
- logsByTags.forEach(logsByTag => expect(logsByTag).toHaveLength(1));
425
+ await store.addBlocks(blocks);
426
+ await store.addLogs(blocks.map(b => b.data));
508
427
  });
509
428
 
510
- it('is possible to batch request logs that have the same tag but different content', async () => {
511
- // get first tx of last block
512
- const logsByTags = await store.getLogsByTags(encryptedLogTags[numBlocks - 1][0].map(buffer => new Fr(buffer)));
429
+ it('is possible to batch request private logs via tags', async () => {
430
+ const tags = [makeTag(1, 1, 2), makeTag(0, 2, 0)];
431
+
432
+ const logsByTags = await store.getLogsByTags(tags);
433
+
434
+ expect(logsByTags).toEqual([
435
+ [
436
+ expect.objectContaining({
437
+ blockNumber: 1,
438
+ logData: makePrivateLog(tags[0]).toBuffer(),
439
+ isFromPublic: false,
440
+ }),
441
+ ],
442
+ [
443
+ expect.objectContaining({
444
+ blockNumber: 0,
445
+ logData: makePrivateLog(tags[1]).toBuffer(),
446
+ isFromPublic: false,
447
+ }),
448
+ ],
449
+ ]);
450
+ });
513
451
 
514
- const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn;
515
- expect(logsByTags.length).toEqual(expectedResponseSize);
452
+ // TODO: Allow this test when #9835 is fixed and tags can be correctly decoded
453
+ it.skip('is possible to batch request all logs (private and unencrypted) via tags', async () => {
454
+ // Tag(0, 0, 0) is shared with the first private log and the first unencrypted log.
455
+ const tags = [makeTag(0, 0, 0)];
456
+
457
+ const logsByTags = await store.getLogsByTags(tags);
458
+
459
+ expect(logsByTags).toEqual([
460
+ [
461
+ expect.objectContaining({
462
+ blockNumber: 0,
463
+ logData: makePrivateLog(tags[0]).toBuffer(),
464
+ isFromPublic: false,
465
+ }),
466
+ expect.objectContaining({
467
+ blockNumber: 0,
468
+ logData: makePublicLog(tags[0]),
469
+ isFromPublic: true,
470
+ }),
471
+ ],
472
+ ]);
473
+ });
516
474
 
517
- logsByTags.forEach(logsByTag => {
518
- expect(logsByTag).toHaveLength(2);
519
- const [tag0, tag1] = logsByTag.map(scopedLog => new Fr(scopedLog.logData.subarray(0, 32)));
520
- expect(tag0).toEqual(tag1);
521
- });
475
+ it('is possible to batch request logs that have the same tag but different content', async () => {
476
+ const tags = [makeTag(1, 2, 1)];
477
+
478
+ // Create a block containing logs that have the same tag as the blocks before.
479
+ const newBlockNumber = numBlocks;
480
+ const newBlock = mockBlockWithLogs(newBlockNumber);
481
+ const newLog = newBlock.data.body.txEffects[1].privateLogs[1];
482
+ newLog.fields[0] = tags[0];
483
+ newBlock.data.body.txEffects[1].privateLogs[1] = newLog;
484
+ await store.addBlocks([newBlock]);
485
+ await store.addLogs([newBlock.data]);
486
+
487
+ const logsByTags = await store.getLogsByTags(tags);
488
+
489
+ expect(logsByTags).toEqual([
490
+ [
491
+ expect.objectContaining({
492
+ blockNumber: 1,
493
+ logData: makePrivateLog(tags[0]).toBuffer(),
494
+ isFromPublic: false,
495
+ }),
496
+ expect.objectContaining({
497
+ blockNumber: newBlockNumber,
498
+ logData: newLog.toBuffer(),
499
+ isFromPublic: false,
500
+ }),
501
+ ],
502
+ ]);
522
503
  });
523
504
 
524
505
  it('is possible to request logs for non-existing tags and determine their position', async () => {
525
- // get random tx from any block that's not the last one
526
- const targetBlockIndex = randomInt(numBlocks - 2);
527
- const targetTxIndex = randomInt(txsPerBlock);
528
-
529
- const logsByTags = await store.getLogsByTags([
530
- Fr.random(),
531
- ...encryptedLogTags[targetBlockIndex][targetTxIndex].slice(1).map(buffer => new Fr(buffer)),
506
+ const tags = [makeTag(99, 88, 77), makeTag(1, 1, 1)];
507
+
508
+ const logsByTags = await store.getLogsByTags(tags);
509
+
510
+ expect(logsByTags).toEqual([
511
+ [
512
+ // No logs for the first tag.
513
+ ],
514
+ [
515
+ expect.objectContaining({
516
+ blockNumber: 1,
517
+ logData: makePrivateLog(tags[1]).toBuffer(),
518
+ isFromPublic: false,
519
+ }),
520
+ ],
532
521
  ]);
533
-
534
- const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn;
535
- expect(logsByTags.length).toEqual(expectedResponseSize);
536
-
537
- const [emptyLogsByTag, ...populatedLogsByTags] = logsByTags;
538
- expect(emptyLogsByTag).toHaveLength(0);
539
-
540
- populatedLogsByTags.forEach((logsByTag, logIndex) => {
541
- expect(logsByTag).toHaveLength(1);
542
- const [scopedLog] = logsByTag;
543
- expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash);
544
- expect(scopedLog.logData).toEqual(
545
- blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex + 1].data,
546
- );
547
- });
548
522
  });
549
523
  });
550
524
 
@@ -557,7 +531,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
557
531
 
558
532
  beforeEach(async () => {
559
533
  blocks = times(numBlocks, (index: number) => ({
560
- data: L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
534
+ data: L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numUnencryptedLogs),
561
535
  l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
562
536
  }));
563
537
 
@@ -18,24 +18,19 @@ import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } fr
18
18
  * The archiver configuration.
19
19
  */
20
20
  export type ArchiverConfig = {
21
- /**
22
- * URL for an archiver service. If set, will return an archiver client as opposed to starting a new one.
23
- */
21
+ /** URL for an archiver service. If set, will return an archiver client as opposed to starting a new one. */
24
22
  archiverUrl?: string;
25
23
 
26
- /**
27
- * The polling interval in ms for retrieving new L2 blocks and encrypted logs.
28
- */
24
+ /** The polling interval in ms for retrieving new L2 blocks and encrypted logs. */
29
25
  archiverPollingIntervalMS?: number;
30
26
 
31
- /**
32
- * The polling interval viem uses in ms
33
- */
27
+ /** The number of L2 blocks the archiver will attempt to download at a time. */
28
+ archiverBatchSize?: number;
29
+
30
+ /** The polling interval viem uses in ms */
34
31
  viemPollingIntervalMS?: number;
35
32
 
36
- /**
37
- * The deployed L1 contract addresses
38
- */
33
+ /** The deployed L1 contract addresses */
39
34
  l1Contracts: L1ContractAddresses;
40
35
 
41
36
  /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */
@@ -54,6 +49,11 @@ export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
54
49
  description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.',
55
50
  ...numberConfigHelper(1_000),
56
51
  },
52
+ archiverBatchSize: {
53
+ env: 'ARCHIVER_BATCH_SIZE',
54
+ description: 'The number of L2 blocks the archiver will attempt to download at a time.',
55
+ ...numberConfigHelper(100),
56
+ },
57
57
  maxLogs: {
58
58
  env: 'ARCHIVER_MAX_LOGS',
59
59
  description: 'The max number of logs that can be obtained in 1 "getUnencryptedLogs" call.',
@@ -1,5 +1,6 @@
1
1
  import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types';
2
2
  import { AppendOnlyTreeSnapshot, Fr, Header, Proof } from '@aztec/circuits.js';
3
+ import { asyncPool } from '@aztec/foundation/async-pool';
3
4
  import { type EthAddress } from '@aztec/foundation/eth-address';
4
5
  import { type ViemSignature } from '@aztec/foundation/eth-signature';
5
6
  import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
@@ -25,16 +26,14 @@ import { type L1Published, type L1PublishedData } from './structs/published.js';
25
26
  * Fetches new L2 blocks.
26
27
  * @param publicClient - The viem public client to use for transaction retrieval.
27
28
  * @param rollupAddress - The address of the rollup contract.
28
- * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
29
29
  * @param searchStartBlock - The block number to use for starting the search.
30
30
  * @param searchEndBlock - The highest block number that we should search up to.
31
31
  * @param expectedNextL2BlockNum - The next L2 block number that we expect to find.
32
32
  * @returns An array of block; as well as the next eth block to search from.
33
33
  */
34
- export async function retrieveBlockFromRollup(
34
+ export async function retrieveBlocksFromRollup(
35
35
  rollup: GetContractReturnType<typeof RollupAbi, PublicClient<HttpTransport, Chain>>,
36
36
  publicClient: PublicClient,
37
- blockUntilSynced: boolean,
38
37
  searchStartBlock: bigint,
39
38
  searchEndBlock: bigint,
40
39
  logger: DebugLogger = createDebugLogger('aztec:archiver'),
@@ -58,13 +57,13 @@ export async function retrieveBlockFromRollup(
58
57
 
59
58
  const lastLog = l2BlockProposedLogs[l2BlockProposedLogs.length - 1];
60
59
  logger.debug(
61
- `Got L2 block processed logs for ${l2BlockProposedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`,
60
+ `Got ${l2BlockProposedLogs.length} L2 block processed logs for L2 blocks ${l2BlockProposedLogs[0].args.blockNumber}-${lastLog.args.blockNumber} between L1 blocks ${searchStartBlock}-${searchEndBlock}`,
62
61
  );
63
62
 
64
63
  const newBlocks = await processL2BlockProposedLogs(rollup, publicClient, l2BlockProposedLogs, logger);
65
64
  retrievedBlocks.push(...newBlocks);
66
65
  searchStartBlock = lastLog.blockNumber! + 1n;
67
- } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
66
+ } while (searchStartBlock <= searchEndBlock);
68
67
  return retrievedBlocks;
69
68
  }
70
69
 
@@ -82,14 +81,13 @@ export async function processL2BlockProposedLogs(
82
81
  logger: DebugLogger,
83
82
  ): Promise<L1Published<L2Block>[]> {
84
83
  const retrievedBlocks: L1Published<L2Block>[] = [];
85
- for (const log of logs) {
84
+ await asyncPool(10, logs, async log => {
86
85
  const l2BlockNumber = log.args.blockNumber!;
87
86
  const archive = log.args.archive!;
88
87
  const archiveFromChain = await rollup.read.archiveAt([l2BlockNumber]);
89
88
 
90
89
  // The value from the event and contract will match only if the block is in the chain.
91
90
  if (archive === archiveFromChain) {
92
- // TODO: Fetch blocks from calldata in parallel
93
91
  const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, l2BlockNumber);
94
92
 
95
93
  const l1: L1PublishedData = {
@@ -100,11 +98,12 @@ export async function processL2BlockProposedLogs(
100
98
 
101
99
  retrievedBlocks.push({ data: block, l1 });
102
100
  } else {
103
- logger.warn(
104
- `Archive mismatch matching, ignoring block ${l2BlockNumber} with archive: ${archive}, expected ${archiveFromChain}`,
105
- );
101
+ logger.warn(`Ignoring L2 block ${l2BlockNumber} due to archive root mismatch`, {
102
+ actual: archive,
103
+ expected: archiveFromChain,
104
+ });
106
105
  }
107
- }
106
+ });
108
107
 
109
108
  return retrievedBlocks;
110
109
  }
@@ -129,10 +128,7 @@ async function getBlockFromRollupTx(
129
128
  l2BlockNum: bigint,
130
129
  ): Promise<L2Block> {
131
130
  const { input: data } = await publicClient.getTransaction({ hash: txHash });
132
- const { functionName, args } = decodeFunctionData({
133
- abi: RollupAbi,
134
- data,
135
- });
131
+ const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data });
136
132
 
137
133
  const allowedMethods = ['propose', 'proposeAndClaim'];
138
134
 
@@ -184,7 +180,6 @@ async function getBlockFromRollupTx(
184
180
  */
185
181
  export async function retrieveL1ToL2Messages(
186
182
  inbox: GetContractReturnType<typeof InboxAbi, PublicClient<HttpTransport, Chain>>,
187
- blockUntilSynced: boolean,
188
183
  searchStartBlock: bigint,
189
184
  searchEndBlock: bigint,
190
185
  ): Promise<DataRetrieval<InboxLeaf>> {
@@ -213,7 +208,7 @@ export async function retrieveL1ToL2Messages(
213
208
 
214
209
  // handles the case when there are no new messages:
215
210
  searchStartBlock = (messageSentLogs.findLast(msgLog => !!msgLog)?.blockNumber || searchStartBlock) + 1n;
216
- } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
211
+ } while (searchStartBlock <= searchEndBlock);
217
212
  return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedL1ToL2Messages };
218
213
  }
219
214
 
@@ -5,6 +5,7 @@ import {
5
5
  type Gauge,
6
6
  type Histogram,
7
7
  LmdbMetrics,
8
+ type LmdbStatsCallback,
8
9
  Metrics,
9
10
  type TelemetryClient,
10
11
  type UpDownCounter,
@@ -23,7 +24,7 @@ export class ArchiverInstrumentation {
23
24
 
24
25
  private log = createDebugLogger('aztec:archiver:instrumentation');
25
26
 
26
- constructor(private telemetry: TelemetryClient) {
27
+ constructor(private telemetry: TelemetryClient, lmdbStats?: LmdbStatsCallback) {
27
28
  const meter = telemetry.getMeter('Archiver');
28
29
  this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, {
29
30
  description: 'The height of the latest block processed by the archiver',
@@ -72,13 +73,10 @@ export class ArchiverInstrumentation {
72
73
  name: Metrics.ARCHIVER_DB_NUM_ITEMS,
73
74
  description: 'Num items in the archiver database',
74
75
  },
76
+ lmdbStats,
75
77
  );
76
78
  }
77
79
 
78
- public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) {
79
- this.dbMetrics.recordDBMetrics(metrics);
80
- }
81
-
82
80
  public isEnabled(): boolean {
83
81
  return this.telemetry.isEnabled();
84
82
  }