@aztec/archiver 0.0.0-test.1 → 0.0.1-commit.03f7ef2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (151)
  1. package/README.md +27 -6
  2. package/dest/archiver/archiver.d.ts +201 -94
  3. package/dest/archiver/archiver.d.ts.map +1 -1
  4. package/dest/archiver/archiver.js +1141 -396
  5. package/dest/archiver/archiver_store.d.ts +171 -83
  6. package/dest/archiver/archiver_store.d.ts.map +1 -1
  7. package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
  8. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
  9. package/dest/archiver/archiver_store_test_suite.js +2389 -393
  10. package/dest/archiver/config.d.ts +7 -22
  11. package/dest/archiver/config.d.ts.map +1 -1
  12. package/dest/archiver/config.js +30 -14
  13. package/dest/archiver/errors.d.ts +33 -1
  14. package/dest/archiver/errors.d.ts.map +1 -1
  15. package/dest/archiver/errors.js +49 -0
  16. package/dest/archiver/index.d.ts +3 -4
  17. package/dest/archiver/index.d.ts.map +1 -1
  18. package/dest/archiver/index.js +1 -2
  19. package/dest/archiver/instrumentation.d.ts +14 -6
  20. package/dest/archiver/instrumentation.d.ts.map +1 -1
  21. package/dest/archiver/instrumentation.js +69 -17
  22. package/dest/archiver/kv_archiver_store/block_store.d.ts +91 -21
  23. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
  24. package/dest/archiver/kv_archiver_store/block_store.js +476 -86
  25. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +4 -4
  26. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
  27. package/dest/archiver/kv_archiver_store/contract_class_store.js +13 -19
  28. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +12 -9
  29. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
  30. package/dest/archiver/kv_archiver_store/contract_instance_store.js +30 -16
  31. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +80 -75
  32. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
  33. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +142 -83
  34. package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
  35. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
  36. package/dest/archiver/kv_archiver_store/log_store.js +153 -113
  37. package/dest/archiver/kv_archiver_store/message_store.d.ts +25 -18
  38. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
  39. package/dest/archiver/kv_archiver_store/message_store.js +152 -49
  40. package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
  41. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
  42. package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
  43. package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
  44. package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
  45. package/dest/archiver/l1/calldata_retriever.js +471 -0
  46. package/dest/archiver/l1/data_retrieval.d.ts +90 -0
  47. package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
  48. package/dest/archiver/l1/data_retrieval.js +331 -0
  49. package/dest/archiver/l1/debug_tx.d.ts +19 -0
  50. package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
  51. package/dest/archiver/l1/debug_tx.js +73 -0
  52. package/dest/archiver/l1/spire_proposer.d.ts +70 -0
  53. package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
  54. package/dest/archiver/l1/spire_proposer.js +157 -0
  55. package/dest/archiver/l1/trace_tx.d.ts +97 -0
  56. package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
  57. package/dest/archiver/l1/trace_tx.js +91 -0
  58. package/dest/archiver/l1/types.d.ts +12 -0
  59. package/dest/archiver/l1/types.d.ts.map +1 -0
  60. package/dest/archiver/l1/types.js +3 -0
  61. package/dest/archiver/l1/validate_trace.d.ts +29 -0
  62. package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
  63. package/dest/archiver/l1/validate_trace.js +150 -0
  64. package/dest/archiver/structs/data_retrieval.d.ts +1 -1
  65. package/dest/archiver/structs/inbox_message.d.ts +15 -0
  66. package/dest/archiver/structs/inbox_message.d.ts.map +1 -0
  67. package/dest/archiver/structs/inbox_message.js +39 -0
  68. package/dest/archiver/structs/published.d.ts +2 -11
  69. package/dest/archiver/structs/published.d.ts.map +1 -1
  70. package/dest/archiver/structs/published.js +1 -1
  71. package/dest/archiver/validation.d.ts +17 -0
  72. package/dest/archiver/validation.d.ts.map +1 -0
  73. package/dest/archiver/validation.js +98 -0
  74. package/dest/factory.d.ts +9 -14
  75. package/dest/factory.d.ts.map +1 -1
  76. package/dest/factory.js +22 -52
  77. package/dest/index.d.ts +2 -2
  78. package/dest/index.d.ts.map +1 -1
  79. package/dest/index.js +1 -1
  80. package/dest/rpc/index.d.ts +2 -3
  81. package/dest/rpc/index.d.ts.map +1 -1
  82. package/dest/rpc/index.js +1 -4
  83. package/dest/test/index.d.ts +1 -1
  84. package/dest/test/mock_archiver.d.ts +16 -8
  85. package/dest/test/mock_archiver.d.ts.map +1 -1
  86. package/dest/test/mock_archiver.js +19 -14
  87. package/dest/test/mock_l1_to_l2_message_source.d.ts +9 -6
  88. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  89. package/dest/test/mock_l1_to_l2_message_source.js +21 -7
  90. package/dest/test/mock_l2_block_source.d.ts +52 -13
  91. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  92. package/dest/test/mock_l2_block_source.js +140 -15
  93. package/dest/test/mock_structs.d.ts +10 -0
  94. package/dest/test/mock_structs.d.ts.map +1 -0
  95. package/dest/test/mock_structs.js +38 -0
  96. package/package.json +29 -30
  97. package/src/archiver/archiver.ts +1477 -501
  98. package/src/archiver/archiver_store.ts +197 -88
  99. package/src/archiver/archiver_store_test_suite.ts +2403 -350
  100. package/src/archiver/config.ts +38 -46
  101. package/src/archiver/errors.ts +85 -0
  102. package/src/archiver/index.ts +2 -3
  103. package/src/archiver/instrumentation.ts +91 -22
  104. package/src/archiver/kv_archiver_store/block_store.ts +640 -101
  105. package/src/archiver/kv_archiver_store/contract_class_store.ts +14 -24
  106. package/src/archiver/kv_archiver_store/contract_instance_store.ts +36 -28
  107. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +193 -113
  108. package/src/archiver/kv_archiver_store/log_store.ts +205 -127
  109. package/src/archiver/kv_archiver_store/message_store.ts +213 -54
  110. package/src/archiver/l1/README.md +98 -0
  111. package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
  112. package/src/archiver/l1/calldata_retriever.ts +641 -0
  113. package/src/archiver/l1/data_retrieval.ts +512 -0
  114. package/src/archiver/l1/debug_tx.ts +99 -0
  115. package/src/archiver/l1/spire_proposer.ts +160 -0
  116. package/src/archiver/l1/trace_tx.ts +128 -0
  117. package/src/archiver/l1/types.ts +13 -0
  118. package/src/archiver/l1/validate_trace.ts +211 -0
  119. package/src/archiver/structs/inbox_message.ts +41 -0
  120. package/src/archiver/structs/published.ts +1 -11
  121. package/src/archiver/validation.ts +124 -0
  122. package/src/factory.ts +28 -69
  123. package/src/index.ts +1 -1
  124. package/src/rpc/index.ts +1 -5
  125. package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
  126. package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
  127. package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
  128. package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
  129. package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
  130. package/src/test/fixtures/trace_transaction-proxied.json +128 -0
  131. package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
  132. package/src/test/mock_archiver.ts +22 -16
  133. package/src/test/mock_l1_to_l2_message_source.ts +20 -8
  134. package/src/test/mock_l2_block_source.ts +186 -21
  135. package/src/test/mock_structs.ts +50 -0
  136. package/dest/archiver/data_retrieval.d.ts +0 -74
  137. package/dest/archiver/data_retrieval.d.ts.map +0 -1
  138. package/dest/archiver/data_retrieval.js +0 -283
  139. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts +0 -12
  140. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts.map +0 -1
  141. package/dest/archiver/kv_archiver_store/nullifier_store.js +0 -73
  142. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts +0 -23
  143. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts.map +0 -1
  144. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.js +0 -49
  145. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +0 -175
  146. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +0 -1
  147. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +0 -636
  148. package/src/archiver/data_retrieval.ts +0 -422
  149. package/src/archiver/kv_archiver_store/nullifier_store.ts +0 -97
  150. package/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +0 -61
  151. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +0 -801
@@ -1,9 +1,11 @@
1
- import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, PUBLIC_LOG_DATA_SIZE_IN_FIELDS } from '@aztec/constants';
2
- import type { Fr } from '@aztec/foundation/fields';
1
+ import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/constants';
2
+ import { BlockNumber } from '@aztec/foundation/branded-types';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
  import { createLogger } from '@aztec/foundation/log';
4
5
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
5
6
  import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
6
- import type { L2Block } from '@aztec/stdlib/block';
7
+ import type { AztecAddress } from '@aztec/stdlib/aztec-address';
8
+ import { L2BlockHash, L2BlockNew } from '@aztec/stdlib/block';
7
9
  import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
8
10
  import {
9
11
  ContractClassLog,
@@ -11,8 +13,9 @@ import {
11
13
  ExtendedPublicLog,
12
14
  type LogFilter,
13
15
  LogId,
14
- PrivateLog,
15
16
  PublicLog,
17
+ type SiloedTag,
18
+ Tag,
16
19
  TxScopedL2Log,
17
20
  } from '@aztec/stdlib/logs';
18
21
 
@@ -22,98 +25,129 @@ import type { BlockStore } from './block_store.js';
22
25
  * A store for logs
23
26
  */
24
27
  export class LogStore {
25
- #logsByTag: AztecAsyncMap<string, Buffer[]>;
26
- #logTagsByBlock: AztecAsyncMap<number, string[]>;
27
- #privateLogsByBlock: AztecAsyncMap<number, Buffer>;
28
+ // `tag` --> private logs
29
+ #privateLogsByTag: AztecAsyncMap<string, Buffer[]>;
30
+ // `{contractAddress}_${tag}` --> public logs
31
+ #publicLogsByContractAndTag: AztecAsyncMap<string, Buffer[]>;
32
+ #privateLogKeysByBlock: AztecAsyncMap<number, string[]>;
33
+ #publicLogKeysByBlock: AztecAsyncMap<number, string[]>;
28
34
  #publicLogsByBlock: AztecAsyncMap<number, Buffer>;
29
35
  #contractClassLogsByBlock: AztecAsyncMap<number, Buffer>;
30
36
  #logsMaxPageSize: number;
31
37
  #log = createLogger('archiver:log_store');
32
38
 
33
- constructor(private db: AztecAsyncKVStore, private blockStore: BlockStore, logsMaxPageSize: number = 1000) {
34
- this.#logsByTag = db.openMap('archiver_tagged_logs_by_tag');
35
- this.#logTagsByBlock = db.openMap('archiver_log_tags_by_block');
36
- this.#privateLogsByBlock = db.openMap('archiver_private_logs_by_block');
39
+ constructor(
40
+ private db: AztecAsyncKVStore,
41
+ private blockStore: BlockStore,
42
+ logsMaxPageSize: number = 1000,
43
+ ) {
44
+ this.#privateLogsByTag = db.openMap('archiver_private_tagged_logs_by_tag');
45
+ this.#publicLogsByContractAndTag = db.openMap('archiver_public_tagged_logs_by_tag');
46
+ this.#privateLogKeysByBlock = db.openMap('archiver_private_log_keys_by_block');
47
+ this.#publicLogKeysByBlock = db.openMap('archiver_public_log_keys_by_block');
37
48
  this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
38
49
  this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
39
50
 
40
51
  this.#logsMaxPageSize = logsMaxPageSize;
41
52
  }
42
53
 
43
- #extractTaggedLogsFromPrivate(block: L2Block) {
44
- const taggedLogs = new Map<string, Buffer[]>();
54
+ /**
55
+ * Extracts tagged logs from a single block, grouping them into private and public maps.
56
+ *
57
+ * @param block - The L2 block to extract logs from.
58
+ * @returns An object containing the private and public tagged logs for the block.
59
+ */
60
+ async #extractTaggedLogsFromBlock(block: L2BlockNew) {
61
+ const blockHash = L2BlockHash.fromField(await block.hash());
62
+ // SiloedTag (as string) -> array of log buffers.
63
+ const privateTaggedLogs = new Map<string, Buffer[]>();
64
+ // "{contractAddress}_{tag}" (as string) -> array of log buffers.
65
+ const publicTaggedLogs = new Map<string, Buffer[]>();
45
66
  const dataStartIndexForBlock =
46
67
  block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
47
68
  block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
69
+
48
70
  block.body.txEffects.forEach((txEffect, txIndex) => {
49
71
  const txHash = txEffect.txHash;
50
72
  const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
51
- txEffect.privateLogs.forEach(log => {
73
+
74
+ txEffect.privateLogs.forEach((log, logIndex) => {
75
+ // Private logs use SiloedTag (already siloed by kernel)
52
76
  const tag = log.fields[0];
53
- const currentLogs = taggedLogs.get(tag.toString()) ?? [];
77
+ this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
78
+
79
+ const currentLogs = privateTaggedLogs.get(tag.toString()) ?? [];
54
80
  currentLogs.push(
55
81
  new TxScopedL2Log(
56
82
  txHash,
57
83
  dataStartIndexForTx,
84
+ logIndex,
58
85
  block.number,
59
- /* isFromPublic */ false,
60
- log.toBuffer(),
86
+ blockHash,
87
+ block.timestamp,
88
+ log,
61
89
  ).toBuffer(),
62
90
  );
63
- taggedLogs.set(tag.toString(), currentLogs);
91
+ privateTaggedLogs.set(tag.toString(), currentLogs);
64
92
  });
65
- });
66
- return taggedLogs;
67
- }
68
-
69
- #extractTaggedLogsFromPublic(block: L2Block) {
70
- const taggedLogs = new Map<string, Buffer[]>();
71
- const dataStartIndexForBlock =
72
- block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
73
- block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
74
- block.body.txEffects.forEach((txEffect, txIndex) => {
75
- const txHash = txEffect.txHash;
76
- const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
77
- txEffect.publicLogs.forEach(log => {
78
- // Check that each log stores 2 lengths in its first field. If not, it's not a tagged log:
79
- const firstFieldBuf = log.log[0].toBuffer();
80
- // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me).
81
- // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here.
82
- if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) {
83
- // See parseLogFromPublic - the first field of a tagged log is 5 bytes structured:
84
- // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]]
85
- this.#log.warn(`Skipping public log with invalid first field: ${log.log[0]}`);
86
- return;
87
- }
88
- // Check that the length values line up with the log contents
89
- const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE();
90
- const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3);
91
- // Add 1 for the first field holding lengths
92
- const totalLogLength = 1 + publicValuesLength + privateValuesLength;
93
- // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length
94
- if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) {
95
- this.#log.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`);
96
- return;
97
- }
98
93
 
99
- // The first elt stores lengths as above => tag is in fields[1]
100
- const tag = log.log[1];
94
+ txEffect.publicLogs.forEach((log, logIndex) => {
95
+ // Public logs use Tag directly (not siloed) and are stored with contract address
96
+ const tag = log.fields[0];
97
+ const contractAddress = log.contractAddress;
98
+ const key = `${contractAddress.toString()}_${tag.toString()}`;
99
+ this.#log.debug(
100
+ `Found public log with tag ${tag.toString()} from contract ${contractAddress.toString()} in block ${block.number}`,
101
+ );
101
102
 
102
- this.#log.debug(`Found tagged public log with tag ${tag.toString()} in block ${block.number}`);
103
- const currentLogs = taggedLogs.get(tag.toString()) ?? [];
103
+ const currentLogs = publicTaggedLogs.get(key) ?? [];
104
104
  currentLogs.push(
105
105
  new TxScopedL2Log(
106
106
  txHash,
107
107
  dataStartIndexForTx,
108
+ logIndex,
108
109
  block.number,
109
- /* isFromPublic */ true,
110
- log.toBuffer(),
110
+ blockHash,
111
+ block.timestamp,
112
+ log,
111
113
  ).toBuffer(),
112
114
  );
113
- taggedLogs.set(tag.toString(), currentLogs);
115
+ publicTaggedLogs.set(key, currentLogs);
114
116
  });
115
117
  });
116
- return taggedLogs;
118
+
119
+ return { privateTaggedLogs, publicTaggedLogs };
120
+ }
121
+
122
+ /**
123
+ * Extracts and aggregates tagged logs from a list of blocks.
124
+ * @param blocks - The blocks to extract logs from.
125
+ * @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
126
+ * "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
127
+ */
128
+ async #extractTaggedLogs(
129
+ blocks: L2BlockNew[],
130
+ ): Promise<{ privateTaggedLogs: Map<string, Buffer[]>; publicTaggedLogs: Map<string, Buffer[]> }> {
131
+ const taggedLogsInBlocks = await Promise.all(blocks.map(block => this.#extractTaggedLogsFromBlock(block)));
132
+
133
+ // Now we merge the maps from each block into a single map.
134
+ const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs }) => {
135
+ for (const [tag, logs] of privateTaggedLogs.entries()) {
136
+ const currentLogs = acc.get(tag) ?? [];
137
+ acc.set(tag, currentLogs.concat(logs));
138
+ }
139
+ return acc;
140
+ }, new Map<string, Buffer[]>());
141
+
142
+ const publicTaggedLogs = taggedLogsInBlocks.reduce((acc, { publicTaggedLogs }) => {
143
+ for (const [key, logs] of publicTaggedLogs.entries()) {
144
+ const currentLogs = acc.get(key) ?? [];
145
+ acc.set(key, currentLogs.concat(logs));
146
+ }
147
+ return acc;
148
+ }, new Map<string, Buffer[]>());
149
+
150
+ return { privateTaggedLogs, publicTaggedLogs };
117
151
  }
118
152
 
119
153
  /**
@@ -121,43 +155,59 @@ export class LogStore {
121
155
  * @param blocks - The blocks for which to add the logs.
122
156
  * @returns True if the operation is successful.
123
157
  */
124
- addLogs(blocks: L2Block[]): Promise<boolean> {
125
- const taggedLogsToAdd = blocks
126
- .flatMap(block => [this.#extractTaggedLogsFromPrivate(block), this.#extractTaggedLogsFromPublic(block)])
127
- .reduce((acc, val) => {
128
- for (const [tag, logs] of val.entries()) {
129
- const currentLogs = acc.get(tag) ?? [];
130
- acc.set(tag, currentLogs.concat(logs));
131
- }
132
- return acc;
133
- });
134
- const tagsToUpdate = Array.from(taggedLogsToAdd.keys());
158
+ async addLogs(blocks: L2BlockNew[]): Promise<boolean> {
159
+ const { privateTaggedLogs, publicTaggedLogs } = await this.#extractTaggedLogs(blocks);
160
+
161
+ const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
162
+ const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
135
163
 
136
164
  return this.db.transactionAsync(async () => {
137
- const currentTaggedLogs = await Promise.all(
138
- tagsToUpdate.map(async tag => ({ tag, logBuffers: await this.#logsByTag.getAsync(tag) })),
165
+ const currentPrivateTaggedLogs = await Promise.all(
166
+ keysOfPrivateLogsToUpdate.map(async key => ({
167
+ tag: key,
168
+ logBuffers: await this.#privateLogsByTag.getAsync(key),
169
+ })),
139
170
  );
140
- currentTaggedLogs.forEach(taggedLogBuffer => {
171
+ currentPrivateTaggedLogs.forEach(taggedLogBuffer => {
141
172
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
142
- taggedLogsToAdd.set(
173
+ privateTaggedLogs.set(
143
174
  taggedLogBuffer.tag,
144
- taggedLogBuffer.logBuffers!.concat(taggedLogsToAdd.get(taggedLogBuffer.tag)!),
175
+ taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
145
176
  );
146
177
  }
147
178
  });
179
+
180
+ const currentPublicTaggedLogs = await Promise.all(
181
+ keysOfPublicLogsToUpdate.map(async key => ({
182
+ key,
183
+ logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
184
+ })),
185
+ );
186
+ currentPublicTaggedLogs.forEach(taggedLogBuffer => {
187
+ if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
188
+ publicTaggedLogs.set(
189
+ taggedLogBuffer.key,
190
+ taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.key)!),
191
+ );
192
+ }
193
+ });
194
+
148
195
  for (const block of blocks) {
149
- const tagsInBlock = [];
150
- for (const [tag, logs] of taggedLogsToAdd.entries()) {
151
- await this.#logsByTag.set(tag, logs);
152
- tagsInBlock.push(tag);
196
+ const blockHash = await block.hash();
197
+
198
+ const privateTagsInBlock: string[] = [];
199
+ for (const [tag, logs] of privateTaggedLogs.entries()) {
200
+ await this.#privateLogsByTag.set(tag, logs);
201
+ privateTagsInBlock.push(tag);
153
202
  }
154
- await this.#logTagsByBlock.set(block.number, tagsInBlock);
203
+ await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
155
204
 
156
- const privateLogsInBlock = block.body.txEffects
157
- .map(txEffect => txEffect.privateLogs)
158
- .flat()
159
- .map(log => log.toBuffer());
160
- await this.#privateLogsByBlock.set(block.number, Buffer.concat(privateLogsInBlock));
205
+ const publicKeysInBlock: string[] = [];
206
+ for (const [key, logs] of publicTaggedLogs.entries()) {
207
+ await this.#publicLogsByContractAndTag.set(key, logs);
208
+ publicKeysInBlock.push(key);
209
+ }
210
+ await this.#publicLogKeysByBlock.set(block.number, publicKeysInBlock);
161
211
 
162
212
  const publicLogsInBlock = block.body.txEffects
163
213
  .map((txEffect, txIndex) =>
@@ -179,68 +229,82 @@ export class LogStore {
179
229
  )
180
230
  .flat();
181
231
 
182
- await this.#publicLogsByBlock.set(block.number, Buffer.concat(publicLogsInBlock));
183
- await this.#contractClassLogsByBlock.set(block.number, Buffer.concat(contractClassLogsInBlock));
232
+ await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
233
+ await this.#contractClassLogsByBlock.set(
234
+ block.number,
235
+ this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
236
+ );
184
237
  }
185
238
 
186
239
  return true;
187
240
  });
188
241
  }
189
242
 
190
- deleteLogs(blocks: L2Block[]): Promise<boolean> {
243
+ #packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
244
+ return Buffer.concat([blockHash.toBuffer(), ...data]);
245
+ }
246
+
247
+ #unpackBlockHash(reader: BufferReader): L2BlockHash {
248
+ const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
249
+
250
+ if (!blockHash) {
251
+ throw new Error('Failed to read block hash from log entry buffer');
252
+ }
253
+
254
+ return L2BlockHash.fromField(blockHash);
255
+ }
256
+
257
+ deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
191
258
  return this.db.transactionAsync(async () => {
192
- const tagsToDelete = (
193
- await Promise.all(
194
- blocks.map(async block => {
195
- const tags = await this.#logTagsByBlock.getAsync(block.number);
196
- return tags ?? [];
197
- }),
198
- )
199
- ).flat();
259
+ await Promise.all(
260
+ blocks.map(async block => {
261
+ // Delete private logs
262
+ const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
263
+ await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));
264
+
265
+ // Delete public logs
266
+ const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
267
+ await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
268
+ }),
269
+ );
200
270
 
201
271
  await Promise.all(
202
272
  blocks.map(block =>
203
273
  Promise.all([
204
- this.#privateLogsByBlock.delete(block.number),
205
274
  this.#publicLogsByBlock.delete(block.number),
206
- this.#logTagsByBlock.delete(block.number),
275
+ this.#privateLogKeysByBlock.delete(block.number),
276
+ this.#publicLogKeysByBlock.delete(block.number),
277
+ this.#contractClassLogsByBlock.delete(block.number),
207
278
  ]),
208
279
  ),
209
280
  );
210
281
 
211
- await Promise.all(tagsToDelete.map(tag => this.#logsByTag.delete(tag.toString())));
212
282
  return true;
213
283
  });
214
284
  }
215
285
 
216
286
  /**
217
- * Retrieves all private logs from up to `limit` blocks, starting from the block number `start`.
218
- * @param start - The block number from which to begin retrieving logs.
219
- * @param limit - The maximum number of blocks to retrieve logs from.
220
- * @returns An array of private logs from the specified range of blocks.
287
+ * Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
288
+ * array implies no logs match that tag.
221
289
  */
222
- async getPrivateLogs(start: number, limit: number): Promise<PrivateLog[]> {
223
- const logs = [];
224
- for await (const buffer of this.#privateLogsByBlock.valuesAsync({ start, limit })) {
225
- const reader = new BufferReader(buffer);
226
- while (reader.remainingBytes() > 0) {
227
- logs.push(reader.readObject(PrivateLog));
228
- }
229
- }
230
- return logs;
290
+ async getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
291
+ const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
292
+
293
+ return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
231
294
  }
232
295
 
233
296
  /**
234
- * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
235
- * @param tags - The tags to filter the logs by.
236
- * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
237
- * that tag.
297
+ * Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
298
+ * logs is returned. An empty array implies no logs match that tag.
238
299
  */
239
- async getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]> {
240
- const logs = await Promise.all(tags.map(tag => this.#logsByTag.getAsync(tag.toString())));
241
- return logs.map(
242
- noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? [],
300
+ async getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
301
+ const logs = await Promise.all(
302
+ tags.map(tag => {
303
+ const key = `${contractAddress.toString()}_${tag.value.toString()}`;
304
+ return this.#publicLogsByContractAndTag.getAsync(key);
305
+ }),
243
306
  );
307
+ return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
244
308
  }
245
309
 
246
310
  /**
@@ -271,6 +335,9 @@ export class LogStore {
271
335
  const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
272
336
  const publicLogsInBlock: [PublicLog[]] = [[]];
273
337
  const reader = new BufferReader(buffer);
338
+
339
+ const blockHash = this.#unpackBlockHash(reader);
340
+
274
341
  while (reader.remainingBytes() > 0) {
275
342
  const indexOfTx = reader.readNumber();
276
343
  const numLogsInTx = reader.readNumber();
@@ -283,7 +350,7 @@ export class LogStore {
283
350
  const txLogs = publicLogsInBlock[txIndex];
284
351
 
285
352
  const logs: ExtendedPublicLog[] = [];
286
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
353
+ const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
287
354
 
288
355
  return { logs, maxLogsHit };
289
356
  }
@@ -306,6 +373,9 @@ export class LogStore {
306
373
  loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
307
374
  const publicLogsInBlock: [PublicLog[]] = [[]];
308
375
  const reader = new BufferReader(logBuffer);
376
+
377
+ const blockHash = this.#unpackBlockHash(reader);
378
+
309
379
  while (reader.remainingBytes() > 0) {
310
380
  const indexOfTx = reader.readNumber();
311
381
  const numLogsInTx = reader.readNumber();
@@ -316,7 +386,7 @@ export class LogStore {
316
386
  }
317
387
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
318
388
  const txLogs = publicLogsInBlock[txIndex];
319
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
389
+ maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
320
390
  if (maxLogsHit) {
321
391
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
322
392
  break loopOverBlocks;
@@ -355,6 +425,8 @@ export class LogStore {
355
425
  const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
356
426
 
357
427
  const reader = new BufferReader(contractClassLogsBuffer);
428
+ const blockHash = this.#unpackBlockHash(reader);
429
+
358
430
  while (reader.remainingBytes() > 0) {
359
431
  const indexOfTx = reader.readNumber();
360
432
  const numLogsInTx = reader.readNumber();
@@ -367,7 +439,7 @@ export class LogStore {
367
439
  const txLogs = contractClassLogsInBlock[txIndex];
368
440
 
369
441
  const logs: ExtendedContractClassLog[] = [];
370
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
442
+ const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
371
443
 
372
444
  return { logs, maxLogsHit };
373
445
  }
@@ -393,6 +465,7 @@ export class LogStore {
393
465
  })) {
394
466
  const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
395
467
  const reader = new BufferReader(logBuffer);
468
+ const blockHash = this.#unpackBlockHash(reader);
396
469
  while (reader.remainingBytes() > 0) {
397
470
  const indexOfTx = reader.readNumber();
398
471
  const numLogsInTx = reader.readNumber();
@@ -403,7 +476,7 @@ export class LogStore {
403
476
  }
404
477
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
405
478
  const txLogs = contractClassLogsInBlock[txIndex];
406
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
479
+ maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
407
480
  if (maxLogsHit) {
408
481
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
409
482
  break loopOverBlocks;
@@ -417,9 +490,10 @@ export class LogStore {
417
490
  #accumulateLogs(
418
491
  results: (ExtendedContractClassLog | ExtendedPublicLog)[],
419
492
  blockNumber: number,
493
+ blockHash: L2BlockHash,
420
494
  txIndex: number,
421
495
  txLogs: (ContractClassLog | PublicLog)[],
422
- filter: LogFilter,
496
+ filter: LogFilter = {},
423
497
  ): boolean {
424
498
  let maxLogsHit = false;
425
499
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
@@ -427,9 +501,13 @@ export class LogStore {
427
501
  const log = txLogs[logIndex];
428
502
  if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
429
503
  if (log instanceof ContractClassLog) {
430
- results.push(new ExtendedContractClassLog(new LogId(blockNumber, txIndex, logIndex), log));
504
+ results.push(
505
+ new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log),
506
+ );
507
+ } else if (log instanceof PublicLog) {
508
+ results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
431
509
  } else {
432
- results.push(new ExtendedPublicLog(new LogId(blockNumber, txIndex, logIndex), log));
510
+ throw new Error('Unknown log type');
433
511
  }
434
512
 
435
513
  if (results.length >= this.#logsMaxPageSize) {