@aztec/archiver 0.70.0 → 0.72.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45):
  1. package/dest/archiver/archiver.d.ts +4 -4
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +19 -13
  4. package/dest/archiver/archiver_store.d.ts +4 -4
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +97 -59
  8. package/dest/archiver/config.d.ts +1 -1
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +3 -3
  11. package/dest/archiver/data_retrieval.js +2 -2
  12. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +4 -4
  13. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  14. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +4 -4
  15. package/dest/archiver/kv_archiver_store/log_store.d.ts +5 -5
  16. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  17. package/dest/archiver/kv_archiver_store/log_store.js +83 -51
  18. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +7 -7
  19. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  20. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +55 -56
  21. package/dest/factory.d.ts +2 -2
  22. package/dest/factory.d.ts.map +1 -1
  23. package/dest/factory.js +4 -4
  24. package/dest/rpc/index.d.ts +1 -1
  25. package/dest/rpc/index.d.ts.map +1 -1
  26. package/dest/rpc/index.js +5 -5
  27. package/dest/test/mock_archiver.d.ts +1 -1
  28. package/dest/test/mock_archiver.d.ts.map +1 -1
  29. package/dest/test/mock_archiver.js +2 -1
  30. package/dest/test/mock_l2_block_source.d.ts +1 -1
  31. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  32. package/dest/test/mock_l2_block_source.js +3 -3
  33. package/package.json +13 -13
  34. package/src/archiver/archiver.ts +24 -15
  35. package/src/archiver/archiver_store.ts +5 -4
  36. package/src/archiver/archiver_store_test_suite.ts +103 -70
  37. package/src/archiver/config.ts +3 -3
  38. package/src/archiver/data_retrieval.ts +1 -1
  39. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +6 -5
  40. package/src/archiver/kv_archiver_store/log_store.ts +108 -61
  41. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +62 -59
  42. package/src/factory.ts +3 -4
  43. package/src/rpc/index.ts +4 -4
  44. package/src/test/mock_archiver.ts +1 -0
  45. package/src/test/mock_l2_block_source.ts +2 -2
@@ -1,6 +1,7 @@
1
1
  import { type BlobSinkClientInterface } from '@aztec/blob-sink/client';
2
2
  import {
3
- type GetUnencryptedLogsResponse,
3
+ type GetContractClassLogsResponse,
4
+ type GetPublicLogsResponse,
4
5
  type InBlock,
5
6
  type InboxLeaf,
6
7
  type L1RollupConstants,
@@ -381,13 +382,21 @@ export class Archiver implements ArchiveSource, Traceable {
381
382
  localBlockForDestinationProvenBlockNumber &&
382
383
  provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString()
383
384
  ) {
384
- await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
385
- // if we are here then we must have a valid proven epoch number
386
- await this.store.setProvenL2EpochNumber(Number(provenEpochNumber));
387
- this.log.info(`Updated proven chain to block ${provenBlockNumber} (epoch ${provenEpochNumber})`, {
388
- provenBlockNumber,
389
- provenEpochNumber,
390
- });
385
+ const [localProvenEpochNumber, localProvenBlockNumber] = await Promise.all([
386
+ this.store.getProvenL2EpochNumber(),
387
+ this.store.getProvenL2BlockNumber(),
388
+ ]);
389
+ if (
390
+ localProvenEpochNumber !== Number(provenEpochNumber) ||
391
+ localProvenBlockNumber !== Number(provenBlockNumber)
392
+ ) {
393
+ await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
394
+ await this.store.setProvenL2EpochNumber(Number(provenEpochNumber));
395
+ this.log.info(`Updated proven chain to block ${provenBlockNumber} (epoch ${provenEpochNumber})`, {
396
+ provenBlockNumber,
397
+ provenEpochNumber,
398
+ });
399
+ }
391
400
  }
392
401
  this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
393
402
  };
@@ -709,12 +718,12 @@ export class Archiver implements ArchiveSource, Traceable {
709
718
  }
710
719
 
711
720
  /**
712
- * Gets unencrypted logs based on the provided filter.
721
+ * Gets public logs based on the provided filter.
713
722
  * @param filter - The filter to apply to the logs.
714
723
  * @returns The requested logs.
715
724
  */
716
- getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
717
- return this.store.getUnencryptedLogs(filter);
725
+ getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
726
+ return this.store.getPublicLogs(filter);
718
727
  }
719
728
 
720
729
  /**
@@ -722,7 +731,7 @@ export class Archiver implements ArchiveSource, Traceable {
722
731
  * @param filter - The filter to apply to the logs.
723
732
  * @returns The requested logs.
724
733
  */
725
- getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
734
+ getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
726
735
  return this.store.getContractClassLogs(filter);
727
736
  }
728
737
 
@@ -1059,10 +1068,10 @@ class ArchiverStoreHelper
1059
1068
  findNullifiersIndexesWithBlock(blockNumber: number, nullifiers: Fr[]): Promise<(InBlock<bigint> | undefined)[]> {
1060
1069
  return this.store.findNullifiersIndexesWithBlock(blockNumber, nullifiers);
1061
1070
  }
1062
- getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
1063
- return this.store.getUnencryptedLogs(filter);
1071
+ getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
1072
+ return this.store.getPublicLogs(filter);
1064
1073
  }
1065
- getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
1074
+ getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
1066
1075
  return this.store.getContractClassLogs(filter);
1067
1076
  }
1068
1077
  getSynchedL2BlockNumber(): Promise<number> {
@@ -1,5 +1,6 @@
1
1
  import {
2
- type GetUnencryptedLogsResponse,
2
+ type GetContractClassLogsResponse,
3
+ type GetPublicLogsResponse,
3
4
  type InBlock,
4
5
  type InboxLeaf,
5
6
  type L2Block,
@@ -156,18 +157,18 @@ export interface ArchiverDataStore {
156
157
  getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]>;
157
158
 
158
159
  /**
159
- * Gets unencrypted logs based on the provided filter.
160
+ * Gets public logs based on the provided filter.
160
161
  * @param filter - The filter to apply to the logs.
161
162
  * @returns The requested logs.
162
163
  */
163
- getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse>;
164
+ getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse>;
164
165
 
165
166
  /**
166
167
  * Gets contract class logs based on the provided filter.
167
168
  * @param filter - The filter to apply to the logs.
168
169
  * @returns The requested logs.
169
170
  */
170
- getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse>;
171
+ getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse>;
171
172
 
172
173
  /**
173
174
  * Gets the number of the latest L2 block processed.
@@ -1,14 +1,4 @@
1
- import {
2
- InboxLeaf,
3
- L2Block,
4
- LogId,
5
- TxEffect,
6
- TxHash,
7
- UnencryptedFunctionL2Logs,
8
- UnencryptedL2Log,
9
- UnencryptedTxL2Logs,
10
- wrapInBlock,
11
- } from '@aztec/circuit-types';
1
+ import { InboxLeaf, L2Block, LogId, TxEffect, TxHash, wrapInBlock } from '@aztec/circuit-types';
12
2
  import '@aztec/circuit-types/jest';
13
3
  import {
14
4
  AztecAddress,
@@ -19,7 +9,9 @@ import {
19
9
  L1_TO_L2_MSG_SUBTREE_HEIGHT,
20
10
  MAX_NULLIFIERS_PER_TX,
21
11
  PRIVATE_LOG_SIZE_IN_FIELDS,
12
+ PUBLIC_LOG_DATA_SIZE_IN_FIELDS,
22
13
  PrivateLog,
14
+ PublicLog,
23
15
  SerializableContractInstance,
24
16
  computePublicBytecodeCommitment,
25
17
  } from '@aztec/circuits.js';
@@ -28,7 +20,7 @@ import {
28
20
  makeExecutablePrivateFunctionWithMembershipProof,
29
21
  makeUnconstrainedFunctionWithMembershipProof,
30
22
  } from '@aztec/circuits.js/testing';
31
- import { times } from '@aztec/foundation/collection';
23
+ import { times, timesParallel } from '@aztec/foundation/collection';
32
24
  import { randomInt } from '@aztec/foundation/crypto';
33
25
 
34
26
  import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
@@ -59,9 +51,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
59
51
  },
60
52
  });
61
53
 
62
- beforeEach(() => {
54
+ beforeEach(async () => {
63
55
  store = getStore();
64
- blocks = times(10, i => makeL1Published(L2Block.random(i + 1), i + 10));
56
+ blocks = await timesParallel(10, async i => makeL1Published(await L2Block.random(i + 1), i + 10));
65
57
  });
66
58
 
67
59
  describe('addBlocks', () => {
@@ -89,7 +81,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
89
81
  });
90
82
 
91
83
  it('can unwind multiple empty blocks', async () => {
92
- const emptyBlocks = times(10, i => makeL1Published(L2Block.random(i + 1, 0), i + 10));
84
+ const emptyBlocks = await timesParallel(10, async i => makeL1Published(await L2Block.random(i + 1, 0), i + 10));
93
85
  await store.addBlocks(emptyBlocks);
94
86
  expect(await store.getSynchedL2BlockNumber()).toBe(10);
95
87
 
@@ -166,14 +158,14 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
166
158
  });
167
159
 
168
160
  describe('addLogs', () => {
169
- it('adds private & unencrypted logs', async () => {
161
+ it('adds private & public logs', async () => {
170
162
  const block = blocks[0].data;
171
163
  await expect(store.addLogs([block])).resolves.toEqual(true);
172
164
  });
173
165
  });
174
166
 
175
167
  describe('deleteLogs', () => {
176
- it('deletes private & unencrypted logs', async () => {
168
+ it('deletes private & public logs', async () => {
177
169
  const block = blocks[0].data;
178
170
  await store.addBlocks([blocks[0]]);
179
171
  await expect(store.addLogs([block])).resolves.toEqual(true);
@@ -181,15 +173,15 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
181
173
  expect((await store.getPrivateLogs(1, 1)).length).toEqual(
182
174
  block.body.txEffects.map(txEffect => txEffect.privateLogs).flat().length,
183
175
  );
184
- expect((await store.getUnencryptedLogs({ fromBlock: 1 })).logs.length).toEqual(
185
- block.body.unencryptedLogs.getTotalLogCount(),
176
+ expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(
177
+ block.body.txEffects.map(txEffect => txEffect.publicLogs).flat().length,
186
178
  );
187
179
 
188
180
  // This one is a pain for memory as we would never want to just delete memory in the middle.
189
181
  await store.deleteLogs([block]);
190
182
 
191
183
  expect((await store.getPrivateLogs(1, 1)).length).toEqual(0);
192
- expect((await store.getUnencryptedLogs({ fromBlock: 1 })).logs.length).toEqual(0);
184
+ expect((await store.getPublicLogs({ fromBlock: 1 })).logs.length).toEqual(0);
193
185
  });
194
186
  });
195
187
 
@@ -284,7 +276,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
284
276
  const blockNum = 10;
285
277
 
286
278
  beforeEach(async () => {
287
- contractInstance = { ...SerializableContractInstance.random(), address: AztecAddress.random() };
279
+ const randomInstance = await SerializableContractInstance.random();
280
+ contractInstance = { ...randomInstance, address: await AztecAddress.random() };
288
281
  await store.addContractInstances([contractInstance], blockNum);
289
282
  });
290
283
 
@@ -293,7 +286,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
293
286
  });
294
287
 
295
288
  it('returns undefined if contract instance is not found', async () => {
296
- await expect(store.getContractInstance(AztecAddress.random())).resolves.toBeUndefined();
289
+ await expect(store.getContractInstance(await AztecAddress.random())).resolves.toBeUndefined();
297
290
  });
298
291
 
299
292
  it('returns undefined if previously stored contract instances was deleted', async () => {
@@ -373,18 +366,34 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
373
366
  const numBlocks = 3;
374
367
  const numTxsPerBlock = 4;
375
368
  const numPrivateLogsPerTx = 3;
376
- const numUnencryptedLogsPerTx = 2;
369
+ const numPublicLogsPerTx = 2;
377
370
 
378
371
  let blocks: L1Published<L2Block>[];
379
372
 
380
373
  const makeTag = (blockNumber: number, txIndex: number, logIndex: number, isPublic = false) =>
381
374
  new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1));
382
375
 
376
+ // See parseLogFromPublic
377
+ const makeLengthsField = (publicValuesLen: number, privateValuesLen: number, ciphertextLen: number) => {
378
+ const buf = Buffer.alloc(32);
379
+ buf.writeUint16BE(publicValuesLen, 24);
380
+ buf.writeUint16BE(privateValuesLen, 27);
381
+ buf.writeUint16BE(ciphertextLen, 30);
382
+ return Fr.fromBuffer(buf);
383
+ };
384
+
383
385
  const makePrivateLog = (tag: Fr) =>
384
386
  PrivateLog.fromFields([tag, ...times(PRIVATE_LOG_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i))]);
385
387
 
388
+ // The tag lives in field 1, not 0, of a public log
389
+ // See extractTaggedLogsFromPublic and noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr -> emit_log
386
390
  const makePublicLog = (tag: Fr) =>
387
- Buffer.concat([tag.toBuffer(), ...times(tag.toNumber() % 60, i => new Fr(tag.toNumber() + i).toBuffer())]);
391
+ PublicLog.fromFields([
392
+ AztecAddress.fromNumber(1).toField(), // log address
393
+ makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 3, 42), // field 0
394
+ tag, // field 1
395
+ ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), // fields 2 to end
396
+ ]);
388
397
 
389
398
  const mockPrivateLogs = (blockNumber: number, txIndex: number) => {
390
399
  return times(numPrivateLogsPerTx, (logIndex: number) => {
@@ -393,23 +402,21 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
393
402
  });
394
403
  };
395
404
 
396
- const mockUnencryptedLogs = (blockNumber: number, txIndex: number) => {
397
- const logs = times(numUnencryptedLogsPerTx, (logIndex: number) => {
405
+ const mockPublicLogs = (blockNumber: number, txIndex: number) => {
406
+ return times(numPublicLogsPerTx, (logIndex: number) => {
398
407
  const tag = makeTag(blockNumber, txIndex, logIndex, /* isPublic */ true);
399
- const log = makePublicLog(tag);
400
- return new UnencryptedL2Log(AztecAddress.fromNumber(txIndex), log);
408
+ return makePublicLog(tag);
401
409
  });
402
- return new UnencryptedTxL2Logs([new UnencryptedFunctionL2Logs(logs)]);
403
410
  };
404
411
 
405
- const mockBlockWithLogs = (blockNumber: number): L1Published<L2Block> => {
406
- const block = L2Block.random(blockNumber);
412
+ const mockBlockWithLogs = async (blockNumber: number): Promise<L1Published<L2Block>> => {
413
+ const block = await L2Block.random(blockNumber);
407
414
  block.header.globalVariables.blockNumber = new Fr(blockNumber);
408
415
 
409
- block.body.txEffects = times(numTxsPerBlock, (txIndex: number) => {
410
- const txEffect = TxEffect.random();
416
+ block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => {
417
+ const txEffect = await TxEffect.random();
411
418
  txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
412
- txEffect.unencryptedLogs = mockUnencryptedLogs(blockNumber, txIndex);
419
+ txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
413
420
  return txEffect;
414
421
  });
415
422
 
@@ -420,7 +427,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
420
427
  };
421
428
 
422
429
  beforeEach(async () => {
423
- blocks = times(numBlocks, (index: number) => mockBlockWithLogs(index));
430
+ blocks = await timesParallel(numBlocks, (index: number) => mockBlockWithLogs(index));
424
431
 
425
432
  await store.addBlocks(blocks);
426
433
  await store.addLogs(blocks.map(b => b.data));
@@ -449,9 +456,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
449
456
  ]);
450
457
  });
451
458
 
452
- // TODO: Allow this test when #9835 is fixed and tags can be correctly decoded
453
- it.skip('is possible to batch request all logs (private and unencrypted) via tags', async () => {
454
- // Tag(0, 0, 0) is shared with the first private log and the first unencrypted log.
459
+ it('is possible to batch request all logs (private and public) via tags', async () => {
460
+ // Tag(0, 0, 0) is shared with the first private log and the first public log.
455
461
  const tags = [makeTag(0, 0, 0)];
456
462
 
457
463
  const logsByTags = await store.getLogsByTags(tags);
@@ -465,7 +471,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
465
471
  }),
466
472
  expect.objectContaining({
467
473
  blockNumber: 0,
468
- logData: makePublicLog(tags[0]),
474
+ logData: makePublicLog(tags[0]).toBuffer(),
469
475
  isFromPublic: true,
470
476
  }),
471
477
  ],
@@ -477,7 +483,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
477
483
 
478
484
  // Create a block containing logs that have the same tag as the blocks before.
479
485
  const newBlockNumber = numBlocks;
480
- const newBlock = mockBlockWithLogs(newBlockNumber);
486
+ const newBlock = await mockBlockWithLogs(newBlockNumber);
481
487
  const newLog = newBlock.data.body.txEffects[1].privateLogs[1];
482
488
  newLog.fields[0] = tags[0];
483
489
  newBlock.data.body.txEffects[1].privateLogs[1] = newLog;
@@ -520,18 +526,48 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
520
526
  ],
521
527
  ]);
522
528
  });
529
+
530
+ it('is not possible to add public logs by tag if they are invalid', async () => {
531
+ const tag = makeTag(99, 88, 77);
532
+ const invalidLogs = [
533
+ PublicLog.fromFields([
534
+ AztecAddress.fromNumber(1).toField(),
535
+ makeLengthsField(2, 3, 42), // This field claims we have 5 items, but we actually have more
536
+ tag,
537
+ ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)),
538
+ ]),
539
+ PublicLog.fromFields([
540
+ AztecAddress.fromNumber(1).toField(),
541
+ makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS, 42), // This field claims we have more than the max items
542
+ tag,
543
+ ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)),
544
+ ]),
545
+ ];
546
+
547
+ // Create a block containing these invalid logs
548
+ const newBlockNumber = numBlocks;
549
+ const newBlock = await mockBlockWithLogs(newBlockNumber);
550
+ newBlock.data.body.txEffects[0].publicLogs = invalidLogs;
551
+ await store.addBlocks([newBlock]);
552
+ await store.addLogs([newBlock.data]);
553
+
554
+ const logsByTags = await store.getLogsByTags([tag]);
555
+
556
+ // Neither of the logs should have been added:
557
+ expect(logsByTags).toEqual([[]]);
558
+ });
523
559
  });
524
560
 
525
- describe('getUnencryptedLogs', () => {
561
+ describe('getPublicLogs', () => {
526
562
  const txsPerBlock = 4;
527
563
  const numPublicFunctionCalls = 3;
528
- const numUnencryptedLogs = 2;
564
+ const numPublicLogs = 2;
529
565
  const numBlocks = 10;
530
566
  let blocks: L1Published<L2Block>[];
531
567
 
532
568
  beforeEach(async () => {
533
- blocks = times(numBlocks, (index: number) => ({
534
- data: L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numUnencryptedLogs),
569
+ blocks = await timesParallel(numBlocks, async (index: number) => ({
570
+ data: await L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numPublicLogs),
535
571
  l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
536
572
  }));
537
573
 
@@ -550,7 +586,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
550
586
  store.deleteLogs(blocks.map(b => b.data)),
551
587
  ]);
552
588
 
553
- const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
589
+ const response = await store.getPublicLogs({ txHash: targetTxHash });
554
590
  const logs = response.logs;
555
591
 
556
592
  expect(response.maxLogsHit).toBeFalsy();
@@ -563,12 +599,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
563
599
  const targetTxIndex = randomInt(txsPerBlock);
564
600
  const targetTxHash = blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash;
565
601
 
566
- const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
602
+ const response = await store.getPublicLogs({ txHash: targetTxHash });
567
603
  const logs = response.logs;
568
604
 
569
605
  expect(response.maxLogsHit).toBeFalsy();
570
606
 
571
- const expectedNumLogs = numPublicFunctionCalls * numUnencryptedLogs;
607
+ const expectedNumLogs = numPublicFunctionCalls * numPublicLogs;
572
608
  expect(logs.length).toEqual(expectedNumLogs);
573
609
 
574
610
  const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM;
@@ -583,12 +619,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
583
619
  const fromBlock = 3;
584
620
  const toBlock = 7;
585
621
 
586
- const response = await store.getUnencryptedLogs({ fromBlock, toBlock });
622
+ const response = await store.getPublicLogs({ fromBlock, toBlock });
587
623
  const logs = response.logs;
588
624
 
589
625
  expect(response.maxLogsHit).toBeFalsy();
590
626
 
591
- const expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numUnencryptedLogs * (toBlock - fromBlock);
627
+ const expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numPublicLogs * (toBlock - fromBlock);
592
628
  expect(logs.length).toEqual(expectedNumLogs);
593
629
 
594
630
  for (const log of logs) {
@@ -602,14 +638,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
602
638
  // Get a random contract address from the logs
603
639
  const targetBlockIndex = randomInt(numBlocks);
604
640
  const targetTxIndex = randomInt(txsPerBlock);
605
- const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
606
- const targetLogIndex = randomInt(numUnencryptedLogs);
641
+ const targetLogIndex = randomInt(numPublicLogs * numPublicFunctionCalls);
607
642
  const targetContractAddress =
608
- blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
609
- targetFunctionLogIndex
610
- ].logs[targetLogIndex].contractAddress;
643
+ blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
611
644
 
612
- const response = await store.getUnencryptedLogs({ contractAddress: targetContractAddress });
645
+ const response = await store.getPublicLogs({ contractAddress: targetContractAddress });
613
646
 
614
647
  expect(response.maxLogsHit).toBeFalsy();
615
648
 
@@ -622,11 +655,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
622
655
  // Get a random log as reference
623
656
  const targetBlockIndex = randomInt(numBlocks);
624
657
  const targetTxIndex = randomInt(txsPerBlock);
625
- const targetLogIndex = randomInt(numUnencryptedLogs);
658
+ const targetLogIndex = randomInt(numPublicLogs);
626
659
 
627
660
  const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
628
661
 
629
- const response = await store.getUnencryptedLogs({ afterLog });
662
+ const response = await store.getPublicLogs({ afterLog });
630
663
  const logs = response.logs;
631
664
 
632
665
  expect(response.maxLogsHit).toBeFalsy();
@@ -648,40 +681,40 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
648
681
  const txHash = TxHash.random();
649
682
  const afterLog = new LogId(1, 0, 0);
650
683
 
651
- const response = await store.getUnencryptedLogs({ txHash, afterLog });
684
+ const response = await store.getPublicLogs({ txHash, afterLog });
652
685
  expect(response.logs.length).toBeGreaterThan(1);
653
686
  });
654
687
 
655
688
  it('intersecting works', async () => {
656
- let logs = (await store.getUnencryptedLogs({ fromBlock: -10, toBlock: -5 })).logs;
689
+ let logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: -5 })).logs;
657
690
  expect(logs.length).toBe(0);
658
691
 
659
692
  // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive
660
- logs = (await store.getUnencryptedLogs({ fromBlock: -10, toBlock: 5 })).logs;
693
+ logs = (await store.getPublicLogs({ fromBlock: -10, toBlock: 5 })).logs;
661
694
  let blockNumbers = new Set(logs.map(log => log.id.blockNumber));
662
695
  expect(blockNumbers).toEqual(new Set([1, 2, 3, 4]));
663
696
 
664
697
  // "toBlock" should be exclusive
665
- logs = (await store.getUnencryptedLogs({ fromBlock: 1, toBlock: 1 })).logs;
698
+ logs = (await store.getPublicLogs({ fromBlock: 1, toBlock: 1 })).logs;
666
699
  expect(logs.length).toBe(0);
667
700
 
668
- logs = (await store.getUnencryptedLogs({ fromBlock: 10, toBlock: 5 })).logs;
701
+ logs = (await store.getPublicLogs({ fromBlock: 10, toBlock: 5 })).logs;
669
702
  expect(logs.length).toBe(0);
670
703
 
671
704
  // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned
672
- logs = (await store.getUnencryptedLogs({ fromBlock: -100, toBlock: +100 })).logs;
705
+ logs = (await store.getPublicLogs({ fromBlock: -100, toBlock: +100 })).logs;
673
706
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
674
707
  expect(blockNumbers.size).toBe(numBlocks);
675
708
 
676
709
  // intersecting with "afterLog" works
677
- logs = (await store.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs;
710
+ logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs;
678
711
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
679
712
  expect(blockNumbers).toEqual(new Set([4]));
680
713
 
681
- logs = (await store.getUnencryptedLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs;
714
+ logs = (await store.getPublicLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs;
682
715
  expect(logs.length).toBe(0);
683
716
 
684
- logs = (await store.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })).logs;
717
+ logs = (await store.getPublicLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })).logs;
685
718
  expect(logs.length).toBe(0);
686
719
  });
687
720
 
@@ -689,11 +722,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
689
722
  // Get a random log as reference
690
723
  const targetBlockIndex = randomInt(numBlocks);
691
724
  const targetTxIndex = randomInt(txsPerBlock);
692
- const targetLogIndex = randomInt(numUnencryptedLogs);
725
+ const targetLogIndex = randomInt(numPublicLogs);
693
726
 
694
727
  const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);
695
728
 
696
- const response = await store.getUnencryptedLogs({ afterLog, fromBlock: afterLog.blockNumber });
729
+ const response = await store.getPublicLogs({ afterLog, fromBlock: afterLog.blockNumber });
697
730
  const logs = response.logs;
698
731
 
699
732
  expect(response.maxLogsHit).toBeFalsy();
@@ -716,8 +749,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
716
749
  const numBlocks = 10;
717
750
  const nullifiersPerBlock = new Map<number, Fr[]>();
718
751
 
719
- beforeEach(() => {
720
- blocks = times(numBlocks, (index: number) => L2Block.random(index + 1, 1));
752
+ beforeEach(async () => {
753
+ blocks = await timesParallel(numBlocks, (index: number) => L2Block.random(index + 1, 1));
721
754
 
722
755
  blocks.forEach((block, blockIndex) => {
723
756
  nullifiersPerBlock.set(
@@ -36,7 +36,7 @@ export type ArchiverConfig = {
36
36
  /** The deployed L1 contract addresses */
37
37
  l1Contracts: L1ContractAddresses;
38
38
 
39
- /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */
39
+ /** The max number of logs that can be obtained in 1 "getPublicLogs" call. */
40
40
  maxLogs?: number;
41
41
  } & L1ReaderConfig &
42
42
  L1ContractsConfig;
@@ -55,7 +55,7 @@ export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
55
55
  archiverPollingIntervalMS: {
56
56
  env: 'ARCHIVER_POLLING_INTERVAL_MS',
57
57
  description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.',
58
- ...numberConfigHelper(1_000),
58
+ ...numberConfigHelper(500),
59
59
  },
60
60
  archiverBatchSize: {
61
61
  env: 'ARCHIVER_BATCH_SIZE',
@@ -64,7 +64,7 @@ export const archiverConfigMappings: ConfigMappingsType<ArchiverConfig> = {
64
64
  },
65
65
  maxLogs: {
66
66
  env: 'ARCHIVER_MAX_LOGS',
67
- description: 'The max number of logs that can be obtained in 1 "getUnencryptedLogs" call.',
67
+ description: 'The max number of logs that can be obtained in 1 "getPublicLogs" call.',
68
68
  ...numberConfigHelper(1_000),
69
69
  },
70
70
  ...l1ReaderConfigMappings,
@@ -165,7 +165,7 @@ async function getBlockFromRollupTx(
165
165
  // TODO(#9101): The below reconstruction is currently redundant, but once we extract blobs will be the way to construct blocks.
166
166
  // The blob source will give us blockFields, and we must construct the body from them:
167
167
  // TODO(#8954): When logs are refactored into fields, we won't need to inject them here.
168
- const reconstructedBlock = Body.fromBlobFields(blockFields, blockBody.unencryptedLogs, blockBody.contractClassLogs);
168
+ const reconstructedBlock = Body.fromBlobFields(blockFields, blockBody.contractClassLogs);
169
169
 
170
170
  if (!reconstructedBlock.toBuffer().equals(blockBody.toBuffer())) {
171
171
  // TODO(#9101): Remove below check (without calldata there will be nothing to check against)
@@ -1,5 +1,6 @@
1
1
  import {
2
- type GetUnencryptedLogsResponse,
2
+ type GetContractClassLogsResponse,
3
+ type GetPublicLogsResponse,
3
4
  type InBlock,
4
5
  type InboxLeaf,
5
6
  type L2Block,
@@ -290,13 +291,13 @@ export class KVArchiverDataStore implements ArchiverDataStore {
290
291
  }
291
292
 
292
293
  /**
293
- * Gets unencrypted logs based on the provided filter.
294
+ * Gets public logs based on the provided filter.
294
295
  * @param filter - The filter to apply to the logs.
295
296
  * @returns The requested logs.
296
297
  */
297
- getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
298
+ getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
298
299
  try {
299
- return Promise.resolve(this.#logStore.getUnencryptedLogs(filter));
300
+ return Promise.resolve(this.#logStore.getPublicLogs(filter));
300
301
  } catch (err) {
301
302
  return Promise.reject(err);
302
303
  }
@@ -307,7 +308,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
307
308
  * @param filter - The filter to apply to the logs.
308
309
  * @returns The requested logs.
309
310
  */
310
- getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
311
+ getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
311
312
  try {
312
313
  return Promise.resolve(this.#logStore.getContractClassLogs(filter));
313
314
  } catch (err) {