@aztec/archiver 0.61.0 → 0.63.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dest/archiver/archiver.d.ts +13 -5
  2. package/dest/archiver/archiver.d.ts.map +1 -1
  3. package/dest/archiver/archiver.js +49 -26
  4. package/dest/archiver/archiver_store.d.ts +10 -5
  5. package/dest/archiver/archiver_store.d.ts.map +1 -1
  6. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.js +64 -36
  8. package/dest/archiver/config.d.ts +2 -6
  9. package/dest/archiver/config.d.ts.map +1 -1
  10. package/dest/archiver/config.js +3 -6
  11. package/dest/archiver/epoch_helpers.d.ts +10 -10
  12. package/dest/archiver/epoch_helpers.d.ts.map +1 -1
  13. package/dest/archiver/epoch_helpers.js +9 -10
  14. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +10 -5
  15. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  16. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +17 -5
  17. package/dest/archiver/kv_archiver_store/log_store.d.ts +8 -2
  18. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  19. package/dest/archiver/kv_archiver_store/log_store.js +144 -57
  20. package/dest/archiver/kv_archiver_store/message_store.d.ts +2 -3
  21. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  22. package/dest/archiver/kv_archiver_store/message_store.js +7 -12
  23. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts +2 -3
  24. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts.map +1 -1
  25. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.js +4 -9
  26. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +15 -7
  27. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -1
  28. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +139 -35
  29. package/dest/factory.d.ts +5 -2
  30. package/dest/factory.d.ts.map +1 -1
  31. package/dest/factory.js +2 -2
  32. package/dest/rpc/index.d.ts +3 -2
  33. package/dest/rpc/index.d.ts.map +1 -1
  34. package/dest/rpc/index.js +10 -3
  35. package/dest/test/mock_archiver.d.ts +1 -1
  36. package/dest/test/mock_archiver.d.ts.map +1 -1
  37. package/dest/test/mock_archiver.js +3 -3
  38. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  39. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  40. package/dest/test/mock_l1_to_l2_message_source.js +2 -2
  41. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  42. package/dest/test/mock_l2_block_source.js +6 -4
  43. package/package.json +10 -10
  44. package/src/archiver/archiver.ts +60 -31
  45. package/src/archiver/archiver_store.ts +11 -5
  46. package/src/archiver/archiver_store_test_suite.ts +85 -40
  47. package/src/archiver/config.ts +11 -12
  48. package/src/archiver/epoch_helpers.ts +16 -12
  49. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +18 -6
  50. package/src/archiver/kv_archiver_store/log_store.ts +176 -58
  51. package/src/archiver/kv_archiver_store/message_store.ts +6 -12
  52. package/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +3 -8
  53. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +155 -31
  54. package/src/factory.ts +6 -3
  55. package/src/rpc/index.ts +11 -2
  56. package/src/test/mock_archiver.ts +2 -2
  57. package/src/test/mock_l1_to_l2_message_source.ts +1 -1
  58. package/src/test/mock_l2_block_source.ts +5 -3
  59. package/dest/rpc/archiver_client.d.ts +0 -3
  60. package/dest/rpc/archiver_client.d.ts.map +0 -1
  61. package/dest/rpc/archiver_client.js +0 -12
  62. package/dest/rpc/archiver_server.d.ts +0 -9
  63. package/dest/rpc/archiver_server.d.ts.map +0 -1
  64. package/dest/rpc/archiver_server.js +0 -20
  65. package/src/rpc/archiver_client.ts +0 -29
  66. package/src/rpc/archiver_server.ts +0 -35
@@ -1,26 +1,30 @@
1
- import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION } from '@aztec/circuits.js';
1
+ type TimeConstants = {
2
+ l1GenesisTime: bigint;
3
+ epochDuration: number;
4
+ slotDuration: number;
5
+ };
2
6
 
3
7
  /** Returns the slot number for a given timestamp. */
4
- export function getSlotAtTimestamp(ts: bigint, constants: { l1GenesisTime: bigint }) {
5
- return ts < constants.l1GenesisTime ? 0n : (ts - constants.l1GenesisTime) / BigInt(AZTEC_SLOT_DURATION);
8
+ export function getSlotAtTimestamp(ts: bigint, constants: Pick<TimeConstants, 'l1GenesisTime' | 'slotDuration'>) {
9
+ return ts < constants.l1GenesisTime ? 0n : (ts - constants.l1GenesisTime) / BigInt(constants.slotDuration);
6
10
  }
7
11
 
8
12
  /** Returns the epoch number for a given timestamp. */
9
- export function getEpochNumberAtTimestamp(ts: bigint, constants: { l1GenesisTime: bigint }) {
10
- return getSlotAtTimestamp(ts, constants) / BigInt(AZTEC_EPOCH_DURATION);
13
+ export function getEpochNumberAtTimestamp(ts: bigint, constants: TimeConstants) {
14
+ return getSlotAtTimestamp(ts, constants) / BigInt(constants.epochDuration);
11
15
  }
12
16
 
13
17
  /** Returns the range of slots (inclusive) for a given epoch number. */
14
- export function getSlotRangeForEpoch(epochNumber: bigint) {
15
- const startSlot = epochNumber * BigInt(AZTEC_EPOCH_DURATION);
16
- return [startSlot, startSlot + BigInt(AZTEC_EPOCH_DURATION) - 1n];
18
+ export function getSlotRangeForEpoch(epochNumber: bigint, constants: Pick<TimeConstants, 'epochDuration'>) {
19
+ const startSlot = epochNumber * BigInt(constants.epochDuration);
20
+ return [startSlot, startSlot + BigInt(constants.epochDuration) - 1n];
17
21
  }
18
22
 
19
23
  /** Returns the range of L1 timestamps (inclusive) for a given epoch number. */
20
- export function getTimestampRangeForEpoch(epochNumber: bigint, constants: { l1GenesisTime: bigint }) {
21
- const [startSlot, endSlot] = getSlotRangeForEpoch(epochNumber);
24
+ export function getTimestampRangeForEpoch(epochNumber: bigint, constants: TimeConstants) {
25
+ const [startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, constants);
22
26
  return [
23
- constants.l1GenesisTime + startSlot * BigInt(AZTEC_SLOT_DURATION),
24
- constants.l1GenesisTime + endSlot * BigInt(AZTEC_SLOT_DURATION),
27
+ constants.l1GenesisTime + startSlot * BigInt(constants.slotDuration),
28
+ constants.l1GenesisTime + endSlot * BigInt(constants.slotDuration),
25
29
  ];
26
30
  }
@@ -1,5 +1,4 @@
1
1
  import {
2
- type EncryptedL2NoteLog,
3
2
  type FromLogType,
4
3
  type GetUnencryptedLogsResponse,
5
4
  type InboxLeaf,
@@ -10,6 +9,7 @@ import {
10
9
  type TxEffect,
11
10
  type TxHash,
12
11
  type TxReceipt,
12
+ type TxScopedL2Log,
13
13
  } from '@aztec/circuit-types';
14
14
  import {
15
15
  type ContractClassPublic,
@@ -199,13 +199,12 @@ export class KVArchiverDataStore implements ArchiverDataStore {
199
199
  }
200
200
 
201
201
  /**
202
- * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`.
202
+ * Gets the L1 to L2 message index in the L1 to L2 message tree.
203
203
  * @param l1ToL2Message - The L1 to L2 message.
204
- * @param startIndex - The index to start searching from.
205
204
  * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
206
205
  */
207
- getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise<bigint | undefined> {
208
- return Promise.resolve(this.#messageStore.getL1ToL2MessageIndex(l1ToL2Message, startIndex));
206
+ getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
207
+ return Promise.resolve(this.#messageStore.getL1ToL2MessageIndex(l1ToL2Message));
209
208
  }
210
209
 
211
210
  /**
@@ -246,7 +245,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
246
245
  * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
247
246
  * that tag.
248
247
  */
249
- getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
248
+ getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
250
249
  try {
251
250
  return this.#logStore.getLogsByTags(tags);
252
251
  } catch (err) {
@@ -267,6 +266,19 @@ export class KVArchiverDataStore implements ArchiverDataStore {
267
266
  }
268
267
  }
269
268
 
269
+ /**
270
+ * Gets contract class logs based on the provided filter.
271
+ * @param filter - The filter to apply to the logs.
272
+ * @returns The requested logs.
273
+ */
274
+ getContractClassLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
275
+ try {
276
+ return Promise.resolve(this.#logStore.getContractClassLogs(filter));
277
+ } catch (err) {
278
+ return Promise.reject(err);
279
+ }
280
+ }
281
+
270
282
  /**
271
283
  * Gets the number of the latest L2 block processed.
272
284
  * @returns The number of the latest L2 block processed.
@@ -1,6 +1,7 @@
1
1
  import {
2
+ type Body,
3
+ ContractClass2BlockL2Logs,
2
4
  EncryptedL2BlockL2Logs,
3
- EncryptedL2NoteLog,
4
5
  EncryptedNoteL2BlockL2Logs,
5
6
  ExtendedUnencryptedL2Log,
6
7
  type FromLogType,
@@ -10,13 +11,14 @@ import {
10
11
  type LogFilter,
11
12
  LogId,
12
13
  LogType,
14
+ TxScopedL2Log,
13
15
  UnencryptedL2BlockL2Logs,
14
16
  type UnencryptedL2Log,
15
17
  } from '@aztec/circuit-types';
16
18
  import { Fr } from '@aztec/circuits.js';
17
- import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants';
19
+ import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/circuits.js/constants';
18
20
  import { createDebugLogger } from '@aztec/foundation/log';
19
- import { type AztecKVStore, type AztecMap, type AztecMultiMap } from '@aztec/kv-store';
21
+ import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
20
22
 
21
23
  import { type BlockStore } from './block_store.js';
22
24
 
@@ -25,58 +27,123 @@ import { type BlockStore } from './block_store.js';
25
27
  */
26
28
  export class LogStore {
27
29
  #noteEncryptedLogsByBlock: AztecMap<number, Buffer>;
28
- #noteEncryptedLogsByHash: AztecMap<string, Buffer>;
29
- #noteEncryptedLogHashesByTag: AztecMultiMap<string, string>;
30
- #noteEncryptedLogTagsByBlock: AztecMultiMap<number, string>;
30
+ #logsByTag: AztecMap<string, Buffer[]>;
31
+ #logTagsByBlock: AztecMap<number, string[]>;
31
32
  #encryptedLogsByBlock: AztecMap<number, Buffer>;
32
33
  #unencryptedLogsByBlock: AztecMap<number, Buffer>;
34
+ #contractClassLogsByBlock: AztecMap<number, Buffer>;
33
35
  #logsMaxPageSize: number;
34
36
  #log = createDebugLogger('aztec:archiver:log_store');
35
37
 
36
38
  constructor(private db: AztecKVStore, private blockStore: BlockStore, logsMaxPageSize: number = 1000) {
37
39
  this.#noteEncryptedLogsByBlock = db.openMap('archiver_note_encrypted_logs_by_block');
38
- this.#noteEncryptedLogsByHash = db.openMap('archiver_note_encrypted_logs_by_hash');
39
- this.#noteEncryptedLogHashesByTag = db.openMultiMap('archiver_tagged_note_encrypted_log_hashes_by_tag');
40
- this.#noteEncryptedLogTagsByBlock = db.openMultiMap('archiver_note_encrypted_log_tags_by_block');
40
+ this.#logsByTag = db.openMap('archiver_tagged_logs_by_tag');
41
+ this.#logTagsByBlock = db.openMap('archiver_log_tags_by_block');
41
42
  this.#encryptedLogsByBlock = db.openMap('archiver_encrypted_logs_by_block');
42
43
  this.#unencryptedLogsByBlock = db.openMap('archiver_unencrypted_logs_by_block');
44
+ this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
43
45
 
44
46
  this.#logsMaxPageSize = logsMaxPageSize;
45
47
  }
46
48
 
49
+ #extractTaggedLogs(block: L2Block, logType: keyof Pick<Body, 'noteEncryptedLogs' | 'unencryptedLogs'>) {
50
+ const taggedLogs = new Map<string, Buffer[]>();
51
+ const dataStartIndexForBlock =
52
+ block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
53
+ block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX;
54
+ block.body[logType].txLogs.forEach((txLogs, txIndex) => {
55
+ const txHash = block.body.txEffects[txIndex].txHash;
56
+ const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
57
+ const logs = txLogs.unrollLogs();
58
+ logs.forEach(log => {
59
+ if (
60
+ (logType == 'noteEncryptedLogs' && log.data.length < 32) ||
61
+ // TODO remove when #9835 and #9836 are fixed
62
+ (logType === 'unencryptedLogs' && log.data.length < 32 * 33)
63
+ ) {
64
+ this.#log.warn(`Skipping log (${logType}) with invalid data length: ${log.data.length}`);
65
+ return;
66
+ }
67
+ try {
68
+ let tag = Fr.ZERO;
69
+ // TODO remove when #9835 and #9836 are fixed. The partial note logs are emitted as bytes, but encoded as Fields.
70
+ // This means that for every 32 bytes of payload, we only have 1 byte of data.
71
+ // Also, the tag is not stored in the first 32 bytes of the log, (that's the length of public fields now) but in the next 32.
72
+ if (logType === 'unencryptedLogs') {
73
+ const correctedBuffer = Buffer.alloc(32);
74
+ const initialOffset = 32;
75
+ for (let i = 0; i < 32; i++) {
76
+ const byte = Fr.fromBuffer(
77
+ log.data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset),
78
+ ).toNumber();
79
+ correctedBuffer.writeUInt8(byte, i);
80
+ }
81
+ tag = new Fr(correctedBuffer);
82
+ } else {
83
+ tag = new Fr(log.data.subarray(0, 32));
84
+ }
85
+ this.#log.verbose(`Found tagged (${logType}) log with tag ${tag.toString()} in block ${block.number}`);
86
+ const currentLogs = taggedLogs.get(tag.toString()) ?? [];
87
+ currentLogs.push(
88
+ new TxScopedL2Log(
89
+ txHash,
90
+ dataStartIndexForTx,
91
+ block.number,
92
+ logType === 'unencryptedLogs',
93
+ log.data,
94
+ ).toBuffer(),
95
+ );
96
+ taggedLogs.set(tag.toString(), currentLogs);
97
+ } catch (err) {
98
+ this.#log.warn(`Failed to add tagged log to store: ${err}`);
99
+ }
100
+ });
101
+ });
102
+ return taggedLogs;
103
+ }
104
+
47
105
  /**
48
106
  * Append new logs to the store's list.
49
107
  * @param blocks - The blocks for which to add the logs.
50
108
  * @returns True if the operation is successful.
51
109
  */
52
- addLogs(blocks: L2Block[]): Promise<boolean> {
110
+ async addLogs(blocks: L2Block[]): Promise<boolean> {
111
+ const taggedLogsToAdd = blocks
112
+ .flatMap(block => [
113
+ this.#extractTaggedLogs(block, 'noteEncryptedLogs'),
114
+ this.#extractTaggedLogs(block, 'unencryptedLogs'),
115
+ ])
116
+ .reduce((acc, val) => {
117
+ for (const [tag, logs] of val.entries()) {
118
+ const currentLogs = acc.get(tag) ?? [];
119
+ acc.set(tag, currentLogs.concat(logs));
120
+ }
121
+ return acc;
122
+ });
123
+ const tagsToUpdate = Array.from(taggedLogsToAdd.keys());
124
+ const currentTaggedLogs = await this.db.transaction(() =>
125
+ tagsToUpdate.map(tag => ({ tag, logBuffers: this.#logsByTag.get(tag) })),
126
+ );
127
+ currentTaggedLogs.forEach(taggedLogBuffer => {
128
+ if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
129
+ taggedLogsToAdd.set(
130
+ taggedLogBuffer.tag,
131
+ taggedLogBuffer.logBuffers!.concat(taggedLogsToAdd.get(taggedLogBuffer.tag)!),
132
+ );
133
+ }
134
+ });
53
135
  return this.db.transaction(() => {
54
136
  blocks.forEach(block => {
137
+ const tagsInBlock = [];
138
+ for (const [tag, logs] of taggedLogsToAdd.entries()) {
139
+ void this.#logsByTag.set(tag, logs);
140
+ tagsInBlock.push(tag);
141
+ }
142
+ void this.#logTagsByBlock.set(block.number, tagsInBlock);
55
143
  void this.#noteEncryptedLogsByBlock.set(block.number, block.body.noteEncryptedLogs.toBuffer());
56
- block.body.noteEncryptedLogs.txLogs.forEach(txLogs => {
57
- const noteLogs = txLogs.unrollLogs();
58
- noteLogs.forEach(noteLog => {
59
- if (noteLog.data.length < 32) {
60
- this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`);
61
- return;
62
- }
63
- try {
64
- const tag = new Fr(noteLog.data.subarray(0, 32));
65
- const hexHash = noteLog.hash().toString('hex');
66
- // Ideally we'd store all of the logs for a matching tag in an AztecMultiMap, but this type doesn't doesn't
67
- // handle storing buffers well. The 'ordered-binary' encoding returns an error trying to decode buffers
68
- // ('the number <> cannot be converted to a BigInt because it is not an integer'). We therefore store
69
- // instead the hashes of the logs.
70
- void this.#noteEncryptedLogHashesByTag.set(tag.toString(), hexHash);
71
- void this.#noteEncryptedLogsByHash.set(hexHash, noteLog.toBuffer());
72
- void this.#noteEncryptedLogTagsByBlock.set(block.number, tag.toString());
73
- } catch (err) {
74
- this.#log.warn(`Failed to add tagged note log to store: ${err}`);
75
- }
76
- });
77
- });
78
144
  void this.#encryptedLogsByBlock.set(block.number, block.body.encryptedLogs.toBuffer());
79
145
  void this.#unencryptedLogsByBlock.set(block.number, block.body.unencryptedLogs.toBuffer());
146
+ void this.#contractClassLogsByBlock.set(block.number, block.body.contractClassLogs.toBuffer());
80
147
  });
81
148
 
82
149
  return true;
@@ -84,26 +151,19 @@ export class LogStore {
84
151
  }
85
152
 
86
153
  async deleteLogs(blocks: L2Block[]): Promise<boolean> {
87
- const noteTagsToDelete = await this.db.transaction(() => {
88
- return blocks.flatMap(block => Array.from(this.#noteEncryptedLogTagsByBlock.getValues(block.number)));
89
- });
90
- const noteLogHashesToDelete = await this.db.transaction(() => {
91
- return noteTagsToDelete.flatMap(tag => Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag)));
154
+ const tagsToDelete = await this.db.transaction(() => {
155
+ return blocks.flatMap(block => this.#logTagsByBlock.get(block.number)?.map(tag => tag.toString()) ?? []);
92
156
  });
93
157
  return this.db.transaction(() => {
94
158
  blocks.forEach(block => {
95
159
  void this.#noteEncryptedLogsByBlock.delete(block.number);
96
160
  void this.#encryptedLogsByBlock.delete(block.number);
97
161
  void this.#unencryptedLogsByBlock.delete(block.number);
98
- void this.#noteEncryptedLogTagsByBlock.delete(block.number);
99
- });
100
-
101
- noteTagsToDelete.forEach(tag => {
102
- void this.#noteEncryptedLogHashesByTag.delete(tag.toString());
162
+ void this.#logTagsByBlock.delete(block.number);
103
163
  });
104
164
 
105
- noteLogHashesToDelete.forEach(hash => {
106
- void this.#noteEncryptedLogsByHash.delete(hash);
165
+ tagsToDelete.forEach(tag => {
166
+ void this.#logsByTag.delete(tag.toString());
107
167
  });
108
168
 
109
169
  return true;
@@ -156,20 +216,12 @@ export class LogStore {
156
216
  * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
157
217
  * that tag.
158
218
  */
159
- getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
160
- return this.db.transaction(() => {
161
- return tags.map(tag => {
162
- const logHashes = Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag.toString()));
163
- return (
164
- logHashes
165
- .map(hash => this.#noteEncryptedLogsByHash.get(hash))
166
- // addLogs should ensure that we never have undefined logs, but we filter them out regardless to protect
167
- // ourselves from database corruption
168
- .filter(noteLogBuffer => noteLogBuffer != undefined)
169
- .map(noteLogBuffer => EncryptedL2NoteLog.fromBuffer(noteLogBuffer!))
170
- );
171
- });
172
- });
219
+ getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
220
+ return this.db.transaction(() =>
221
+ tags
222
+ .map(tag => this.#logsByTag.get(tag.toString()))
223
+ .map(noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? []),
224
+ );
173
225
  }
174
226
 
175
227
  /**
@@ -236,6 +288,72 @@ export class LogStore {
236
288
  return { logs, maxLogsHit };
237
289
  }
238
290
 
291
+ /**
292
+ * Gets contract class logs based on the provided filter.
293
+ * @param filter - The filter to apply to the logs.
294
+ * @returns The requested logs.
295
+ */
296
+ getContractClassLogs(filter: LogFilter): GetUnencryptedLogsResponse {
297
+ if (filter.afterLog) {
298
+ return this.#filterContractClassLogsBetweenBlocks(filter);
299
+ } else if (filter.txHash) {
300
+ return this.#filterContractClassLogsOfTx(filter);
301
+ } else {
302
+ return this.#filterContractClassLogsBetweenBlocks(filter);
303
+ }
304
+ }
305
+
306
+ #filterContractClassLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
307
+ if (!filter.txHash) {
308
+ throw new Error('Missing txHash');
309
+ }
310
+
311
+ const [blockNumber, txIndex] = this.blockStore.getTxLocation(filter.txHash) ?? [];
312
+ if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
313
+ return { logs: [], maxLogsHit: false };
314
+ }
315
+ const contractClassLogsBuffer = this.#contractClassLogsByBlock.get(blockNumber);
316
+ const contractClassLogsInBlock = contractClassLogsBuffer
317
+ ? ContractClass2BlockL2Logs.fromBuffer(contractClassLogsBuffer)
318
+ : new ContractClass2BlockL2Logs([]);
319
+ const txLogs = contractClassLogsInBlock.txLogs[txIndex].unrollLogs();
320
+
321
+ const logs: ExtendedUnencryptedL2Log[] = [];
322
+ const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
323
+
324
+ return { logs, maxLogsHit };
325
+ }
326
+
327
+ #filterContractClassLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
328
+ const start =
329
+ filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
330
+ const end = filter.toBlock;
331
+
332
+ if (typeof end === 'number' && end < start) {
333
+ return {
334
+ logs: [],
335
+ maxLogsHit: true,
336
+ };
337
+ }
338
+
339
+ const logs: ExtendedUnencryptedL2Log[] = [];
340
+
341
+ let maxLogsHit = false;
342
+ loopOverBlocks: for (const [blockNumber, logBuffer] of this.#contractClassLogsByBlock.entries({ start, end })) {
343
+ const contractClassLogsInBlock = ContractClass2BlockL2Logs.fromBuffer(logBuffer);
344
+ for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.txLogs.length; txIndex++) {
345
+ const txLogs = contractClassLogsInBlock.txLogs[txIndex].unrollLogs();
346
+ maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
347
+ if (maxLogsHit) {
348
+ this.#log.debug(`Max logs hit at block ${blockNumber}`);
349
+ break loopOverBlocks;
350
+ }
351
+ }
352
+ }
353
+
354
+ return { logs, maxLogsHit };
355
+ }
356
+
239
357
  #accumulateLogs(
240
358
  results: ExtendedUnencryptedL2Log[],
241
359
  blockNumber: number,
@@ -10,7 +10,7 @@ import { type DataRetrieval } from '../structs/data_retrieval.js';
10
10
  */
11
11
  export class MessageStore {
12
12
  #l1ToL2Messages: AztecMap<string, Buffer>;
13
- #l1ToL2MessageIndices: AztecMap<string, bigint[]>; // We store array of bigints here because there can be duplicate messages
13
+ #l1ToL2MessageIndices: AztecMap<string, bigint>;
14
14
  #lastSynchedL1Block: AztecSingleton<bigint>;
15
15
  #totalMessageCount: AztecSingleton<bigint>;
16
16
 
@@ -57,11 +57,8 @@ export class MessageStore {
57
57
 
58
58
  for (const message of messages.retrievedData) {
59
59
  const key = `${message.index}`;
60
- void this.#l1ToL2Messages.setIfNotExists(key, message.leaf.toBuffer());
61
-
62
- const indices = this.#l1ToL2MessageIndices.get(message.leaf.toString()) ?? [];
63
- indices.push(message.index);
64
- void this.#l1ToL2MessageIndices.set(message.leaf.toString(), indices);
60
+ void this.#l1ToL2Messages.set(key, message.leaf.toBuffer());
61
+ void this.#l1ToL2MessageIndices.set(message.leaf.toString(), message.index);
65
62
  }
66
63
 
67
64
  const lastTotalMessageCount = this.getTotalL1ToL2MessageCount();
@@ -72,15 +69,12 @@ export class MessageStore {
72
69
  }
73
70
 
74
71
  /**
75
- * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`.
72
+ * Gets the L1 to L2 message index in the L1 to L2 message tree.
76
73
  * @param l1ToL2Message - The L1 to L2 message.
77
- * @param startIndex - The index to start searching from.
78
74
  * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
79
75
  */
80
- getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise<bigint | undefined> {
81
- const indices = this.#l1ToL2MessageIndices.get(l1ToL2Message.toString()) ?? [];
82
- const index = indices.find(i => i >= startIndex);
83
- return Promise.resolve(index);
76
+ getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined> {
77
+ return Promise.resolve(this.#l1ToL2MessageIndices.get(l1ToL2Message.toString()));
84
78
  }
85
79
 
86
80
  getL1ToL2Messages(blockNumber: bigint): Fr[] {
@@ -46,19 +46,14 @@ export class L1ToL2MessageStore {
46
46
  }
47
47
 
48
48
  /**
49
- * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`.
49
+ * Gets the L1 to L2 message index in the L1 to L2 message tree.
50
50
  * @param l1ToL2Message - The L1 to L2 message.
51
- * @param startIndex - The index to start searching from.
52
51
  * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
53
52
  */
54
- getMessageIndex(l1ToL2Message: Fr, startIndex: bigint): bigint | undefined {
53
+ getMessageIndex(l1ToL2Message: Fr): bigint | undefined {
55
54
  for (const [key, message] of this.store.entries()) {
56
55
  if (message.equals(l1ToL2Message)) {
57
- const indexInTheWholeTree = BigInt(key);
58
- if (indexInTheWholeTree < startIndex) {
59
- continue;
60
- }
61
- return indexInTheWholeTree;
56
+ return BigInt(key);
62
57
  }
63
58
  }
64
59
  return undefined;