@aztec/archiver 0.71.0 → 0.73.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/dest/archiver/archiver.d.ts +6 -6
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +35 -20
  4. package/dest/archiver/archiver_store.d.ts +6 -6
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  8. package/dest/archiver/archiver_store_test_suite.js +103 -65
  9. package/dest/archiver/config.d.ts +2 -2
  10. package/dest/archiver/config.d.ts.map +1 -1
  11. package/dest/archiver/config.js +5 -5
  12. package/dest/archiver/data_retrieval.d.ts +3 -2
  13. package/dest/archiver/data_retrieval.d.ts.map +1 -1
  14. package/dest/archiver/data_retrieval.js +69 -16
  15. package/dest/archiver/errors.d.ts +4 -0
  16. package/dest/archiver/errors.d.ts.map +1 -0
  17. package/dest/archiver/errors.js +6 -0
  18. package/dest/archiver/kv_archiver_store/block_store.d.ts +16 -16
  19. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  20. package/dest/archiver/kv_archiver_store/block_store.js +53 -53
  21. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +5 -5
  22. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  23. package/dest/archiver/kv_archiver_store/contract_class_store.js +13 -12
  24. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +3 -3
  25. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  26. package/dest/archiver/kv_archiver_store/contract_instance_store.js +3 -3
  27. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +7 -11
  28. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  29. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +41 -63
  30. package/dest/archiver/kv_archiver_store/log_store.d.ts +8 -8
  31. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  32. package/dest/archiver/kv_archiver_store/log_store.js +122 -89
  33. package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -6
  34. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  35. package/dest/archiver/kv_archiver_store/message_store.js +16 -16
  36. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts +2 -2
  37. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts.map +1 -1
  38. package/dest/archiver/kv_archiver_store/nullifier_store.js +31 -22
  39. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +9 -9
  40. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  41. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +72 -71
  42. package/dest/factory.js +8 -8
  43. package/dest/rpc/index.d.ts +1 -1
  44. package/dest/rpc/index.d.ts.map +1 -1
  45. package/dest/rpc/index.js +5 -5
  46. package/dest/test/mock_archiver.d.ts +1 -1
  47. package/dest/test/mock_archiver.d.ts.map +1 -1
  48. package/dest/test/mock_archiver.js +2 -1
  49. package/dest/test/mock_l2_block_source.d.ts +3 -3
  50. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  51. package/dest/test/mock_l2_block_source.js +14 -11
  52. package/package.json +13 -13
  53. package/src/archiver/archiver.ts +46 -25
  54. package/src/archiver/archiver_store.ts +6 -5
  55. package/src/archiver/archiver_store_test_suite.ts +113 -77
  56. package/src/archiver/config.ts +6 -6
  57. package/src/archiver/data_retrieval.ts +94 -12
  58. package/src/archiver/errors.ts +5 -0
  59. package/src/archiver/kv_archiver_store/block_store.ts +66 -67
  60. package/src/archiver/kv_archiver_store/contract_class_store.ts +17 -15
  61. package/src/archiver/kv_archiver_store/contract_instance_store.ts +5 -5
  62. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +48 -66
  63. package/src/archiver/kv_archiver_store/log_store.ts +167 -112
  64. package/src/archiver/kv_archiver_store/message_store.ts +22 -22
  65. package/src/archiver/kv_archiver_store/nullifier_store.ts +48 -30
  66. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +100 -96
  67. package/src/factory.ts +11 -9
  68. package/src/rpc/index.ts +4 -4
  69. package/src/test/mock_archiver.ts +1 -0
  70. package/src/test/mock_l2_block_source.ts +20 -18
@@ -1,46 +1,55 @@
1
1
  import { type InBlock, type L2Block } from '@aztec/circuit-types';
2
2
  import { type Fr, MAX_NULLIFIERS_PER_TX } from '@aztec/circuits.js';
3
3
  import { createLogger } from '@aztec/foundation/log';
4
- import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
4
+ import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
5
5
 
6
6
  export class NullifierStore {
7
- #nullifiersToBlockNumber: AztecMap<string, number>;
8
- #nullifiersToBlockHash: AztecMap<string, string>;
9
- #nullifiersToIndex: AztecMap<string, number>;
7
+ #nullifiersToBlockNumber: AztecAsyncMap<string, number>;
8
+ #nullifiersToBlockHash: AztecAsyncMap<string, string>;
9
+ #nullifiersToIndex: AztecAsyncMap<string, number>;
10
10
  #log = createLogger('archiver:log_store');
11
11
 
12
- constructor(private db: AztecKVStore) {
12
+ constructor(private db: AztecAsyncKVStore) {
13
13
  this.#nullifiersToBlockNumber = db.openMap('archiver_nullifiers_to_block_number');
14
14
  this.#nullifiersToBlockHash = db.openMap('archiver_nullifiers_to_block_hash');
15
15
  this.#nullifiersToIndex = db.openMap('archiver_nullifiers_to_index');
16
16
  }
17
17
 
18
18
  async addNullifiers(blocks: L2Block[]): Promise<boolean> {
19
- await this.db.transaction(() => {
20
- blocks.forEach(block => {
21
- const dataStartIndexForBlock =
22
- block.header.state.partial.nullifierTree.nextAvailableLeafIndex -
23
- block.body.txEffects.length * MAX_NULLIFIERS_PER_TX;
24
- block.body.txEffects.forEach((txEffects, txIndex) => {
25
- const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NULLIFIERS_PER_TX;
26
- txEffects.nullifiers.forEach((nullifier, nullifierIndex) => {
27
- void this.#nullifiersToBlockNumber.set(nullifier.toString(), block.number);
28
- void this.#nullifiersToBlockHash.set(nullifier.toString(), block.hash().toString());
29
- void this.#nullifiersToIndex.set(nullifier.toString(), dataStartIndexForTx + nullifierIndex);
30
- });
31
- });
32
- });
19
+ const blockHashes = await Promise.all(blocks.map(block => block.hash()));
20
+ await this.db.transactionAsync(async () => {
21
+ await Promise.all(
22
+ blocks.map((block, i) => {
23
+ const dataStartIndexForBlock =
24
+ block.header.state.partial.nullifierTree.nextAvailableLeafIndex -
25
+ block.body.txEffects.length * MAX_NULLIFIERS_PER_TX;
26
+ return Promise.all(
27
+ block.body.txEffects.map((txEffects, txIndex) => {
28
+ const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NULLIFIERS_PER_TX;
29
+ return Promise.all(
30
+ txEffects.nullifiers.map(async (nullifier, nullifierIndex) => {
31
+ await this.#nullifiersToBlockNumber.set(nullifier.toString(), block.number);
32
+ await this.#nullifiersToBlockHash.set(nullifier.toString(), blockHashes[i].toString());
33
+ await this.#nullifiersToIndex.set(nullifier.toString(), dataStartIndexForTx + nullifierIndex);
34
+ }),
35
+ );
36
+ }),
37
+ );
38
+ }),
39
+ );
33
40
  });
34
41
  return true;
35
42
  }
36
43
 
37
44
  async deleteNullifiers(blocks: L2Block[]): Promise<boolean> {
38
- await this.db.transaction(() => {
45
+ await this.db.transactionAsync(async () => {
39
46
  for (const block of blocks) {
40
47
  for (const nullifier of block.body.txEffects.flatMap(tx => tx.nullifiers)) {
41
- void this.#nullifiersToBlockNumber.delete(nullifier.toString());
42
- void this.#nullifiersToBlockHash.delete(nullifier.toString());
43
- void this.#nullifiersToIndex.delete(nullifier.toString());
48
+ await Promise.all([
49
+ this.#nullifiersToBlockNumber.delete(nullifier.toString()),
50
+ this.#nullifiersToBlockHash.delete(nullifier.toString()),
51
+ this.#nullifiersToIndex.delete(nullifier.toString()),
52
+ ]);
44
53
  }
45
54
  }
46
55
  });
@@ -51,13 +60,22 @@ export class NullifierStore {
51
60
  blockNumber: number,
52
61
  nullifiers: Fr[],
53
62
  ): Promise<(InBlock<bigint> | undefined)[]> {
54
- const maybeNullifiers = await this.db.transaction(() => {
55
- return nullifiers.map(nullifier => ({
56
- data: this.#nullifiersToIndex.get(nullifier.toString()),
57
- l2BlockNumber: this.#nullifiersToBlockNumber.get(nullifier.toString()),
58
- l2BlockHash: this.#nullifiersToBlockHash.get(nullifier.toString()),
59
- }));
60
- });
63
+ const asStrings = nullifiers.map(x => x.toString());
64
+
65
+ const maybeNullifiers = await Promise.all(
66
+ asStrings.map(async nullifier => {
67
+ const [data, l2BlockNumber, l2BlockHash] = await Promise.all([
68
+ this.#nullifiersToIndex.getAsync(nullifier),
69
+ this.#nullifiersToBlockNumber.getAsync(nullifier),
70
+ this.#nullifiersToBlockHash.getAsync(nullifier),
71
+ ]);
72
+ return {
73
+ data,
74
+ l2BlockNumber,
75
+ l2BlockHash,
76
+ };
77
+ }),
78
+ );
61
79
  return maybeNullifiers.map(({ data, l2BlockNumber, l2BlockHash }) => {
62
80
  if (
63
81
  data === undefined ||
@@ -1,7 +1,9 @@
1
1
  import {
2
2
  type ContractClass2BlockL2Logs,
3
+ ExtendedPublicLog,
3
4
  ExtendedUnencryptedL2Log,
4
- type GetUnencryptedLogsResponse,
5
+ type GetContractClassLogsResponse,
6
+ type GetPublicLogsResponse,
5
7
  type InBlock,
6
8
  type InboxLeaf,
7
9
  type L2Block,
@@ -12,7 +14,6 @@ import {
12
14
  type TxHash,
13
15
  TxReceipt,
14
16
  TxScopedL2Log,
15
- type UnencryptedL2BlockL2Logs,
16
17
  wrapInBlock,
17
18
  } from '@aztec/circuit-types';
18
19
  import {
@@ -25,7 +26,9 @@ import {
25
26
  INITIAL_L2_BLOCK_NUM,
26
27
  MAX_NOTE_HASHES_PER_TX,
27
28
  MAX_NULLIFIERS_PER_TX,
29
+ PUBLIC_LOG_DATA_SIZE_IN_FIELDS,
28
30
  type PrivateLog,
31
+ type PublicLog,
29
32
  type UnconstrainedFunctionWithMembershipProof,
30
33
  } from '@aztec/circuits.js';
31
34
  import { FunctionSelector } from '@aztec/foundation/abi';
@@ -57,7 +60,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
57
60
 
58
61
  private privateLogsPerBlock: Map<number, PrivateLog[]> = new Map();
59
62
 
60
- private unencryptedLogsPerBlock: Map<number, UnencryptedL2BlockL2Logs> = new Map();
63
+ private publicLogsPerBlock: Map<number, PublicLog[]> = new Map();
61
64
 
62
65
  private contractClassLogsPerBlock: Map<number, ContractClass2BlockL2Logs> = new Map();
63
66
 
@@ -89,7 +92,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
89
92
  #log = createLogger('archiver:data-store');
90
93
 
91
94
  constructor(
92
- /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */
95
+ /** The max number of logs that can be obtained in 1 "getPublicLogs" call. */
93
96
  public readonly maxLogs: number,
94
97
  ) {}
95
98
 
@@ -188,14 +191,18 @@ export class MemoryArchiverStore implements ArchiverDataStore {
188
191
  * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
189
192
  * @returns True if the operation is successful.
190
193
  */
191
- public addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
194
+ public async addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
192
195
  if (blocks.length === 0) {
193
196
  return Promise.resolve(true);
194
197
  }
195
198
 
196
199
  this.lastL1BlockNewBlocks = blocks[blocks.length - 1].l1.blockNumber;
197
200
  this.l2Blocks.push(...blocks);
198
- this.txEffects.push(...blocks.flatMap(b => b.data.body.txEffects.map(txEffect => wrapInBlock(txEffect, b.data))));
201
+ const flatTxEffects = blocks.flatMap(b => b.data.body.txEffects.map(txEffect => ({ block: b, txEffect })));
202
+ const wrappedTxEffects = await Promise.all(
203
+ flatTxEffects.map(flatTxEffect => wrapInBlock(flatTxEffect.txEffect, flatTxEffect.block.data)),
204
+ );
205
+ this.txEffects.push(...wrappedTxEffects);
199
206
 
200
207
  return Promise.resolve(true);
201
208
  }
@@ -249,43 +256,41 @@ export class MemoryArchiverStore implements ArchiverDataStore {
249
256
  const dataStartIndexForBlock =
250
257
  block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
251
258
  block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
252
- block.body.unencryptedLogs.txLogs.forEach((txLogs, txIndex) => {
253
- const txHash = block.body.txEffects[txIndex].txHash;
259
+ block.body.txEffects.forEach((txEffect, txIndex) => {
260
+ const txHash = txEffect.txHash;
254
261
  const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
255
- const logs = txLogs.unrollLogs();
256
- logs.forEach(log => {
257
- if (
258
- // TODO remove when #9835 and #9836 are fixed
259
- log.data.length <
260
- 32 * 33
261
- ) {
262
- this.#log.warn(`Skipping unencrypted log with invalid data length: ${log.data.length}`);
262
+ txEffect.publicLogs.forEach(log => {
263
+ // Check that each log stores 3 lengths in its first field. If not, it's not a tagged log:
264
+ // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me).
265
+ // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here.
266
+ const firstFieldBuf = log.log[0].toBuffer();
267
+ if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) {
268
+ // See parseLogFromPublic - the first field of a tagged log is 8 bytes structured:
269
+ // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]]
270
+ this.#log.warn(`Skipping public log with invalid first field: ${log.log[0]}`);
263
271
  return;
264
272
  }
265
- try {
266
- // TODO remove when #9835 and #9836 are fixed. The partial note logs are emitted as bytes, but encoded as Fields.
267
- // This means that for every 32 bytes of payload, we only have 1 byte of data.
268
- // Also, the tag is not stored in the first 32 bytes of the log, (that's the length of public fields now) but in the next 32.
269
- const correctedBuffer = Buffer.alloc(32);
270
- const initialOffset = 32;
271
- for (let i = 0; i < 32; i++) {
272
- const byte = Fr.fromBuffer(
273
- log.data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset),
274
- ).toNumber();
275
- correctedBuffer.writeUInt8(byte, i);
276
- }
277
- const tag = new Fr(correctedBuffer);
278
- this.#log.verbose(`Storing unencrypted tagged log with tag ${tag.toString()} in block ${block.number}`);
279
- const currentLogs = this.taggedLogs.get(tag.toString()) || [];
280
- this.taggedLogs.set(tag.toString(), [
281
- ...currentLogs,
282
- new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ true, log.data),
283
- ]);
284
- const currentTagsInBlock = this.logTagsPerBlock.get(block.number) || [];
285
- this.logTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]);
286
- } catch (err) {
287
- this.#log.warn(`Failed to add tagged log to store: ${err}`);
273
+ // Check that the length values line up with the log contents
274
+ const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE();
275
+ const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3);
276
+ // Add 1 for the first field holding lengths
277
+ const totalLogLength = 1 + publicValuesLength + privateValuesLength;
278
+ // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length
279
+ if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) {
280
+ this.#log.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`);
281
+ return;
288
282
  }
283
+
284
+ // The first elt stores lengths => tag is in fields[1]
285
+ const tag = log.log[1];
286
+ this.#log.verbose(`Storing public tagged log with tag ${tag.toString()} in block ${block.number}`);
287
+ const currentLogs = this.taggedLogs.get(tag.toString()) || [];
288
+ this.taggedLogs.set(tag.toString(), [
289
+ ...currentLogs,
290
+ new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ true, log.toBuffer()),
291
+ ]);
292
+ const currentTagsInBlock = this.logTagsPerBlock.get(block.number) || [];
293
+ this.logTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]);
289
294
  });
290
295
  });
291
296
  }
@@ -297,10 +302,10 @@ export class MemoryArchiverStore implements ArchiverDataStore {
297
302
  */
298
303
  addLogs(blocks: L2Block[]): Promise<boolean> {
299
304
  blocks.forEach(block => {
300
- void this.#storeTaggedLogsFromPrivate(block);
301
- void this.#storeTaggedLogsFromPublic(block);
305
+ this.#storeTaggedLogsFromPrivate(block);
306
+ this.#storeTaggedLogsFromPublic(block);
302
307
  this.privateLogsPerBlock.set(block.number, block.body.txEffects.map(txEffect => txEffect.privateLogs).flat());
303
- this.unencryptedLogsPerBlock.set(block.number, block.body.unencryptedLogs);
308
+ this.publicLogsPerBlock.set(block.number, block.body.txEffects.map(txEffect => txEffect.publicLogs).flat());
304
309
  this.contractClassLogsPerBlock.set(block.number, block.body.contractClassLogs);
305
310
  });
306
311
  return Promise.resolve(true);
@@ -316,7 +321,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
316
321
 
317
322
  blocks.forEach(block => {
318
323
  this.privateLogsPerBlock.delete(block.number);
319
- this.unencryptedLogsPerBlock.delete(block.number);
324
+ this.publicLogsPerBlock.delete(block.number);
320
325
  this.logTagsPerBlock.delete(block.number);
321
326
  this.contractClassLogsPerBlock.delete(block.number);
322
327
  });
@@ -324,22 +329,25 @@ export class MemoryArchiverStore implements ArchiverDataStore {
324
329
  return Promise.resolve(true);
325
330
  }
326
331
 
327
- addNullifiers(blocks: L2Block[]): Promise<boolean> {
328
- blocks.forEach(block => {
329
- const dataStartIndexForBlock =
330
- block.header.state.partial.nullifierTree.nextAvailableLeafIndex -
331
- block.body.txEffects.length * MAX_NULLIFIERS_PER_TX;
332
- block.body.txEffects.forEach((txEffects, txIndex) => {
333
- const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NULLIFIERS_PER_TX;
334
- txEffects.nullifiers.forEach((nullifier, nullifierIndex) => {
335
- this.blockScopedNullifiers.set(nullifier.toString(), {
336
- index: BigInt(dataStartIndexForTx + nullifierIndex),
337
- blockNumber: block.number,
338
- blockHash: block.hash().toString(),
332
+ async addNullifiers(blocks: L2Block[]): Promise<boolean> {
333
+ await Promise.all(
334
+ blocks.map(async block => {
335
+ const dataStartIndexForBlock =
336
+ block.header.state.partial.nullifierTree.nextAvailableLeafIndex -
337
+ block.body.txEffects.length * MAX_NULLIFIERS_PER_TX;
338
+ const blockHash = await block.hash();
339
+ block.body.txEffects.forEach((txEffects, txIndex) => {
340
+ const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NULLIFIERS_PER_TX;
341
+ txEffects.nullifiers.forEach((nullifier, nullifierIndex) => {
342
+ this.blockScopedNullifiers.set(nullifier.toString(), {
343
+ index: BigInt(dataStartIndexForTx + nullifierIndex),
344
+ blockNumber: block.number,
345
+ blockHash: blockHash.toString(),
346
+ });
339
347
  });
340
348
  });
341
- });
342
- });
349
+ }),
350
+ );
343
351
  return Promise.resolve(true);
344
352
  }
345
353
 
@@ -447,24 +455,22 @@ export class MemoryArchiverStore implements ArchiverDataStore {
447
455
  * @param txHash - The hash of a tx we try to get the receipt for.
448
456
  * @returns The requested tx receipt (or undefined if not found).
449
457
  */
450
- public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
458
+ public async getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
451
459
  for (const block of this.l2Blocks) {
452
460
  for (const txEffect of block.data.body.txEffects) {
453
461
  if (txEffect.txHash.equals(txHash)) {
454
- return Promise.resolve(
455
- new TxReceipt(
456
- txHash,
457
- TxReceipt.statusFromRevertCode(txEffect.revertCode),
458
- '',
459
- txEffect.transactionFee.toBigInt(),
460
- L2BlockHash.fromField(block.data.hash()),
461
- block.data.number,
462
- ),
462
+ return new TxReceipt(
463
+ txHash,
464
+ TxReceipt.statusFromRevertCode(txEffect.revertCode),
465
+ '',
466
+ txEffect.transactionFee.toBigInt(),
467
+ L2BlockHash.fromField(await block.data.hash()),
468
+ block.data.number,
463
469
  );
464
470
  }
465
471
  }
466
472
  }
467
- return Promise.resolve(undefined);
473
+ return undefined;
468
474
  }
469
475
 
470
476
  /**
@@ -518,12 +524,12 @@ export class MemoryArchiverStore implements ArchiverDataStore {
518
524
  }
519
525
 
520
526
  /**
521
- * Gets unencrypted logs based on the provided filter.
527
+ * Gets public logs based on the provided filter.
522
528
  * @param filter - The filter to apply to the logs.
523
529
  * @returns The requested logs.
524
530
  * @remarks Works by doing an intersection of all params in the filter.
525
531
  */
526
- getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
532
+ getPublicLogs(filter: LogFilter): Promise<GetPublicLogsResponse> {
527
533
  let txHash: TxHash | undefined;
528
534
  let fromBlock = 0;
529
535
  let toBlock = this.l2Blocks.length + INITIAL_L2_BLOCK_NUM;
@@ -564,34 +570,34 @@ export class MemoryArchiverStore implements ArchiverDataStore {
564
570
 
565
571
  const contractAddress = filter.contractAddress;
566
572
 
567
- const logs: ExtendedUnencryptedL2Log[] = [];
573
+ const logs: ExtendedPublicLog[] = [];
568
574
 
569
575
  for (; fromBlock < toBlock; fromBlock++) {
570
576
  const block = this.l2Blocks[fromBlock - INITIAL_L2_BLOCK_NUM];
571
- const blockLogs = this.unencryptedLogsPerBlock.get(fromBlock);
577
+ const blockLogs = this.publicLogsPerBlock.get(fromBlock);
572
578
 
573
579
  if (blockLogs) {
574
- for (; txIndexInBlock < blockLogs.txLogs.length; txIndexInBlock++) {
575
- const txLogs = blockLogs.txLogs[txIndexInBlock].unrollLogs();
576
- for (; logIndexInTx < txLogs.length; logIndexInTx++) {
577
- const log = txLogs[logIndexInTx];
578
- if (
579
- (!txHash || block.data.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
580
- (!contractAddress || log.contractAddress.equals(contractAddress))
581
- ) {
582
- logs.push(new ExtendedUnencryptedL2Log(new LogId(block.data.number, txIndexInBlock, logIndexInTx), log));
583
- if (logs.length === this.maxLogs) {
584
- return Promise.resolve({
585
- logs,
586
- maxLogsHit: true,
587
- });
588
- }
580
+ for (let logIndex = 0; logIndex < blockLogs.length; logIndex++) {
581
+ const log = blockLogs[logIndex];
582
+ const thisTxEffect = block.data.body.txEffects.filter(effect => effect.publicLogs.includes(log))[0];
583
+ const thisTxIndexInBlock = block.data.body.txEffects.indexOf(thisTxEffect);
584
+ const thisLogIndexInTx = thisTxEffect.publicLogs.indexOf(log);
585
+ if (
586
+ (!txHash || thisTxEffect.txHash.equals(txHash)) &&
587
+ (!contractAddress || log.contractAddress.equals(contractAddress)) &&
588
+ thisTxIndexInBlock >= txIndexInBlock &&
589
+ thisLogIndexInTx >= logIndexInTx
590
+ ) {
591
+ logs.push(new ExtendedPublicLog(new LogId(block.data.number, thisTxIndexInBlock, thisLogIndexInTx), log));
592
+ if (logs.length === this.maxLogs) {
593
+ return Promise.resolve({
594
+ logs,
595
+ maxLogsHit: true,
596
+ });
589
597
  }
590
598
  }
591
- logIndexInTx = 0;
592
599
  }
593
600
  }
594
- txIndexInBlock = 0;
595
601
  }
596
602
 
597
603
  return Promise.resolve({
@@ -607,7 +613,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
607
613
  * @returns The requested logs.
608
614
  * @remarks Works by doing an intersection of all params in the filter.
609
615
  */
610
- getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
616
+ getContractClassLogs(filter: LogFilter): Promise<GetContractClassLogsResponse> {
611
617
  let txHash: TxHash | undefined;
612
618
  let fromBlock = 0;
613
619
  let toBlock = this.l2Blocks.length + INITIAL_L2_BLOCK_NUM;
@@ -734,20 +740,18 @@ export class MemoryArchiverStore implements ArchiverDataStore {
734
740
  return Promise.resolve(this.functionNames.get(selector.toString()));
735
741
  }
736
742
 
737
- public registerContractFunctionSignatures(_address: AztecAddress, signatures: string[]): Promise<void> {
743
+ public async registerContractFunctionSignatures(_address: AztecAddress, signatures: string[]): Promise<void> {
738
744
  for (const sig of signatures) {
739
745
  try {
740
- const selector = FunctionSelector.fromSignature(sig);
746
+ const selector = await FunctionSelector.fromSignature(sig);
741
747
  this.functionNames.set(selector.toString(), sig.slice(0, sig.indexOf('(')));
742
748
  } catch {
743
749
  this.#log.warn(`Failed to parse signature: ${sig}. Ignoring`);
744
750
  }
745
751
  }
746
-
747
- return Promise.resolve();
748
752
  }
749
753
 
750
- public estimateSize(): { mappingSize: number; actualSize: number; numItems: number } {
751
- return { mappingSize: 0, actualSize: 0, numItems: 0 };
754
+ public estimateSize(): Promise<{ mappingSize: number; actualSize: number; numItems: number }> {
755
+ return Promise.resolve({ mappingSize: 0, actualSize: 0, numItems: 0 });
752
756
  }
753
757
  }
package/src/factory.ts CHANGED
@@ -9,7 +9,7 @@ import { FunctionType, decodeFunctionSignature } from '@aztec/foundation/abi';
9
9
  import { createLogger } from '@aztec/foundation/log';
10
10
  import { type Maybe } from '@aztec/foundation/types';
11
11
  import { type DataStoreConfig } from '@aztec/kv-store/config';
12
- import { createStore } from '@aztec/kv-store/lmdb';
12
+ import { createStore } from '@aztec/kv-store/lmdb-v2';
13
13
  import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token';
14
14
  import { TokenBridgeContractArtifact } from '@aztec/noir-contracts.js/TokenBridge';
15
15
  import { protocolContractNames } from '@aztec/protocol-contracts';
@@ -41,7 +41,7 @@ export async function createArchiver(
41
41
  async function registerProtocolContracts(store: KVArchiverDataStore) {
42
42
  const blockNumber = 0;
43
43
  for (const name of protocolContractNames) {
44
- const contract = getCanonicalProtocolContract(name);
44
+ const contract = await getCanonicalProtocolContract(name);
45
45
  const contractClassPublic: ContractClassPublic = {
46
46
  ...contract.contractClass,
47
47
  privateFunctions: [],
@@ -53,7 +53,7 @@ async function registerProtocolContracts(store: KVArchiverDataStore) {
53
53
  .map(fn => decodeFunctionSignature(fn.name, fn.parameters));
54
54
 
55
55
  await store.registerContractFunctionSignatures(contract.address, publicFunctionSignatures);
56
- const bytecodeCommitment = computePublicBytecodeCommitment(contractClassPublic.packedBytecode);
56
+ const bytecodeCommitment = await computePublicBytecodeCommitment(contractClassPublic.packedBytecode);
57
57
  await store.addContractClasses([contractClassPublic], [bytecodeCommitment], blockNumber);
58
58
  await store.addContractInstances([contract.instance], blockNumber);
59
59
  }
@@ -67,11 +67,13 @@ async function registerProtocolContracts(store: KVArchiverDataStore) {
67
67
  async function registerCommonContracts(store: KVArchiverDataStore) {
68
68
  const blockNumber = 0;
69
69
  const artifacts = [TokenBridgeContractArtifact, TokenContractArtifact];
70
- const classes = artifacts.map(artifact => ({
71
- ...getContractClassFromArtifact(artifact),
72
- privateFunctions: [],
73
- unconstrainedFunctions: [],
74
- }));
75
- const bytecodeCommitments = classes.map(x => computePublicBytecodeCommitment(x.packedBytecode));
70
+ const classes = await Promise.all(
71
+ artifacts.map(async artifact => ({
72
+ ...(await getContractClassFromArtifact(artifact)),
73
+ privateFunctions: [],
74
+ unconstrainedFunctions: [],
75
+ })),
76
+ );
77
+ const bytecodeCommitments = await Promise.all(classes.map(x => computePublicBytecodeCommitment(x.packedBytecode)));
76
78
  await store.addContractClasses(classes, bytecodeCommitments, blockNumber);
77
79
  }
package/src/rpc/index.ts CHANGED
@@ -1,11 +1,11 @@
1
1
  import { type ArchiverApi, ArchiverApiSchema } from '@aztec/circuit-types';
2
- import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client';
3
- import { createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
2
+ import { createSafeJsonRpcClient } from '@aztec/foundation/json-rpc/client';
3
+ import { createTracedJsonRpcServer, makeTracedFetch } from '@aztec/telemetry-client';
4
4
 
5
- export function createArchiverClient(url: string, fetch = makeFetch([1, 2, 3], true)): ArchiverApi {
5
+ export function createArchiverClient(url: string, fetch = makeTracedFetch([1, 2, 3], true)): ArchiverApi {
6
6
  return createSafeJsonRpcClient<ArchiverApi>(url, ArchiverApiSchema, false, 'archiver', fetch);
7
7
  }
8
8
 
9
9
  export function createArchiverRpcServer(handler: ArchiverApi) {
10
- return createSafeJsonRpcServer(handler, ArchiverApiSchema);
10
+ return createTracedJsonRpcServer(handler, ArchiverApiSchema);
11
11
  }
@@ -51,5 +51,6 @@ export class MockPrefilledArchiver extends MockArchiver {
51
51
 
52
52
  const fromBlock = this.l2Blocks.length;
53
53
  this.addBlocks(this.precomputed.slice(fromBlock, fromBlock + numBlocks));
54
+ return Promise.resolve();
54
55
  }
55
56
  }
@@ -24,10 +24,10 @@ export class MockL2BlockSource implements L2BlockSource {
24
24
 
25
25
  private log = createLogger('archiver:mock_l2_block_source');
26
26
 
27
- public createBlocks(numBlocks: number) {
27
+ public async createBlocks(numBlocks: number) {
28
28
  for (let i = 0; i < numBlocks; i++) {
29
29
  const blockNum = this.l2Blocks.length + 1;
30
- const block = L2Block.random(blockNum);
30
+ const block = await L2Block.random(blockNum);
31
31
  this.l2Blocks.push(block);
32
32
  }
33
33
 
@@ -126,7 +126,7 @@ export class MockL2BlockSource implements L2BlockSource {
126
126
  * @param txHash - The hash of a transaction which resulted in the returned tx effect.
127
127
  * @returns The requested tx effect.
128
128
  */
129
- public getTxEffect(txHash: TxHash) {
129
+ public async getTxEffect(txHash: TxHash) {
130
130
  const match = this.l2Blocks
131
131
  .flatMap(b => b.body.txEffects.map(tx => [tx, b] as const))
132
132
  .find(([tx]) => tx.txHash.equals(txHash));
@@ -134,7 +134,7 @@ export class MockL2BlockSource implements L2BlockSource {
134
134
  return Promise.resolve(undefined);
135
135
  }
136
136
  const [txEffect, block] = match;
137
- return Promise.resolve({ data: txEffect, l2BlockNumber: block.number, l2BlockHash: block.hash().toString() });
137
+ return { data: txEffect, l2BlockNumber: block.number, l2BlockHash: (await block.hash()).toString() };
138
138
  }
139
139
 
140
140
  /**
@@ -142,24 +142,22 @@ export class MockL2BlockSource implements L2BlockSource {
142
142
  * @param txHash - The hash of a tx we try to get the receipt for.
143
143
  * @returns The requested tx receipt (or undefined if not found).
144
144
  */
145
- public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
145
+ public async getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
146
146
  for (const block of this.l2Blocks) {
147
147
  for (const txEffect of block.body.txEffects) {
148
148
  if (txEffect.txHash.equals(txHash)) {
149
- return Promise.resolve(
150
- new TxReceipt(
151
- txHash,
152
- TxStatus.SUCCESS,
153
- '',
154
- txEffect.transactionFee.toBigInt(),
155
- L2BlockHash.fromField(block.hash()),
156
- block.number,
157
- ),
149
+ return new TxReceipt(
150
+ txHash,
151
+ TxStatus.SUCCESS,
152
+ '',
153
+ txEffect.transactionFee.toBigInt(),
154
+ L2BlockHash.fromField(await block.hash()),
155
+ block.number,
158
156
  );
159
157
  }
160
158
  }
161
159
  }
162
- return Promise.resolve(undefined);
160
+ return undefined;
163
161
  }
164
162
 
165
163
  async getL2Tips(): Promise<L2Tips> {
@@ -169,10 +167,14 @@ export class MockL2BlockSource implements L2BlockSource {
169
167
  await this.getProvenBlockNumber(),
170
168
  ] as const;
171
169
 
170
+ const latestBlock = this.l2Blocks[latest - 1];
171
+ const provenBlock = this.l2Blocks[proven - 1];
172
+ const finalizedBlock = this.l2Blocks[finalized - 1];
173
+
172
174
  return {
173
- latest: { number: latest, hash: this.l2Blocks[latest - 1]?.hash().toString() },
174
- proven: { number: proven, hash: this.l2Blocks[proven - 1]?.hash().toString() },
175
- finalized: { number: finalized, hash: this.l2Blocks[finalized - 1]?.hash().toString() },
175
+ latest: { number: latest, hash: (await latestBlock?.hash())?.toString() },
176
+ proven: { number: proven, hash: (await provenBlock?.hash())?.toString() },
177
+ finalized: { number: finalized, hash: (await finalizedBlock?.hash())?.toString() },
176
178
  };
177
179
  }
178
180