@aztec/archiver 0.0.1-commit.03f7ef2 → 0.0.1-commit.04852196a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (204)
  1. package/README.md +156 -22
  2. package/dest/archiver.d.ts +141 -0
  3. package/dest/archiver.d.ts.map +1 -0
  4. package/dest/archiver.js +710 -0
  5. package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
  6. package/dest/config.d.ts.map +1 -0
  7. package/dest/{archiver/config.js → config.js} +11 -2
  8. package/dest/errors.d.ts +41 -0
  9. package/dest/errors.d.ts.map +1 -0
  10. package/dest/{archiver/errors.js → errors.js} +8 -0
  11. package/dest/factory.d.ts +10 -8
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +93 -11
  14. package/dest/index.d.ts +11 -4
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +9 -3
  17. package/dest/interfaces.d.ts +9 -0
  18. package/dest/interfaces.d.ts.map +1 -0
  19. package/dest/interfaces.js +3 -0
  20. package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
  21. package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
  22. package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +35 -32
  23. package/dest/l1/calldata_retriever.d.ts +135 -0
  24. package/dest/l1/calldata_retriever.d.ts.map +1 -0
  25. package/dest/l1/calldata_retriever.js +402 -0
  26. package/dest/l1/data_retrieval.d.ts +88 -0
  27. package/dest/l1/data_retrieval.d.ts.map +1 -0
  28. package/dest/{archiver/l1 → l1}/data_retrieval.js +54 -71
  29. package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
  30. package/dest/l1/debug_tx.d.ts.map +1 -0
  31. package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +5 -5
  32. package/dest/l1/spire_proposer.d.ts.map +1 -0
  33. package/dest/{archiver/l1 → l1}/spire_proposer.js +9 -17
  34. package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
  35. package/dest/l1/trace_tx.d.ts.map +1 -0
  36. package/dest/l1/types.d.ts +12 -0
  37. package/dest/l1/types.d.ts.map +1 -0
  38. package/dest/{archiver/l1 → l1}/validate_trace.d.ts +6 -3
  39. package/dest/l1/validate_trace.d.ts.map +1 -0
  40. package/dest/{archiver/l1 → l1}/validate_trace.js +14 -10
  41. package/dest/modules/data_source_base.d.ts +89 -0
  42. package/dest/modules/data_source_base.d.ts.map +1 -0
  43. package/dest/modules/data_source_base.js +216 -0
  44. package/dest/modules/data_store_updater.d.ts +83 -0
  45. package/dest/modules/data_store_updater.d.ts.map +1 -0
  46. package/dest/modules/data_store_updater.js +331 -0
  47. package/dest/modules/instrumentation.d.ts +50 -0
  48. package/dest/modules/instrumentation.d.ts.map +1 -0
  49. package/dest/{archiver → modules}/instrumentation.js +44 -68
  50. package/dest/modules/l1_synchronizer.d.ts +72 -0
  51. package/dest/modules/l1_synchronizer.d.ts.map +1 -0
  52. package/dest/modules/l1_synchronizer.js +1119 -0
  53. package/dest/modules/validation.d.ts +17 -0
  54. package/dest/modules/validation.d.ts.map +1 -0
  55. package/dest/{archiver → modules}/validation.js +7 -1
  56. package/dest/store/block_store.d.ts +196 -0
  57. package/dest/store/block_store.d.ts.map +1 -0
  58. package/dest/{archiver/kv_archiver_store → store}/block_store.js +228 -62
  59. package/dest/store/contract_class_store.d.ts +18 -0
  60. package/dest/store/contract_class_store.d.ts.map +1 -0
  61. package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +12 -8
  62. package/dest/store/contract_instance_store.d.ts +24 -0
  63. package/dest/store/contract_instance_store.d.ts.map +1 -0
  64. package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
  65. package/dest/store/kv_archiver_store.d.ts +354 -0
  66. package/dest/store/kv_archiver_store.d.ts.map +1 -0
  67. package/dest/store/kv_archiver_store.js +464 -0
  68. package/dest/store/l2_tips_cache.d.ts +19 -0
  69. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  70. package/dest/store/l2_tips_cache.js +89 -0
  71. package/dest/store/log_store.d.ts +54 -0
  72. package/dest/store/log_store.d.ts.map +1 -0
  73. package/dest/{archiver/kv_archiver_store → store}/log_store.js +156 -104
  74. package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
  75. package/dest/store/message_store.d.ts.map +1 -0
  76. package/dest/{archiver/kv_archiver_store → store}/message_store.js +1 -1
  77. package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
  78. package/dest/structs/data_retrieval.d.ts.map +1 -0
  79. package/dest/structs/inbox_message.d.ts +15 -0
  80. package/dest/structs/inbox_message.d.ts.map +1 -0
  81. package/dest/{archiver/structs → structs}/published.d.ts +1 -1
  82. package/dest/structs/published.d.ts.map +1 -0
  83. package/dest/test/fake_l1_state.d.ts +195 -0
  84. package/dest/test/fake_l1_state.d.ts.map +1 -0
  85. package/dest/test/fake_l1_state.js +421 -0
  86. package/dest/test/index.d.ts +2 -1
  87. package/dest/test/index.d.ts.map +1 -1
  88. package/dest/test/index.js +4 -1
  89. package/dest/test/mock_archiver.d.ts +2 -2
  90. package/dest/test/mock_archiver.d.ts.map +1 -1
  91. package/dest/test/mock_archiver.js +3 -3
  92. package/dest/test/mock_l1_to_l2_message_source.d.ts +2 -2
  93. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  94. package/dest/test/mock_l1_to_l2_message_source.js +12 -3
  95. package/dest/test/mock_l2_block_source.d.ts +40 -17
  96. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  97. package/dest/test/mock_l2_block_source.js +235 -83
  98. package/dest/test/mock_structs.d.ts +81 -3
  99. package/dest/test/mock_structs.d.ts.map +1 -1
  100. package/dest/test/mock_structs.js +152 -7
  101. package/dest/test/noop_l1_archiver.d.ts +23 -0
  102. package/dest/test/noop_l1_archiver.d.ts.map +1 -0
  103. package/dest/test/noop_l1_archiver.js +68 -0
  104. package/package.json +16 -17
  105. package/src/archiver.ts +466 -0
  106. package/src/{archiver/config.ts → config.ts} +13 -2
  107. package/src/{archiver/errors.ts → errors.ts} +12 -0
  108. package/src/factory.ts +143 -12
  109. package/src/index.ts +11 -3
  110. package/src/interfaces.ts +9 -0
  111. package/src/l1/README.md +55 -0
  112. package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +45 -33
  113. package/src/l1/calldata_retriever.ts +511 -0
  114. package/src/{archiver/l1 → l1}/data_retrieval.ts +75 -94
  115. package/src/{archiver/l1 → l1}/spire_proposer.ts +7 -15
  116. package/src/{archiver/l1 → l1}/validate_trace.ts +25 -7
  117. package/src/modules/data_source_base.ts +328 -0
  118. package/src/modules/data_store_updater.ts +453 -0
  119. package/src/{archiver → modules}/instrumentation.ts +53 -70
  120. package/src/modules/l1_synchronizer.ts +939 -0
  121. package/src/{archiver → modules}/validation.ts +11 -6
  122. package/src/{archiver/kv_archiver_store → store}/block_store.ts +293 -100
  123. package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +12 -8
  124. package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +1 -1
  125. package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +273 -40
  126. package/src/store/l2_tips_cache.ts +89 -0
  127. package/src/{archiver/kv_archiver_store → store}/log_store.ts +256 -141
  128. package/src/{archiver/kv_archiver_store → store}/message_store.ts +1 -1
  129. package/src/test/fake_l1_state.ts +657 -0
  130. package/src/test/index.ts +4 -0
  131. package/src/test/mock_archiver.ts +4 -3
  132. package/src/test/mock_l1_to_l2_message_source.ts +10 -4
  133. package/src/test/mock_l2_block_source.ts +282 -90
  134. package/src/test/mock_structs.ts +283 -8
  135. package/src/test/noop_l1_archiver.ts +109 -0
  136. package/dest/archiver/archiver.d.ts +0 -304
  137. package/dest/archiver/archiver.d.ts.map +0 -1
  138. package/dest/archiver/archiver.js +0 -1645
  139. package/dest/archiver/archiver_store.d.ts +0 -308
  140. package/dest/archiver/archiver_store.d.ts.map +0 -1
  141. package/dest/archiver/archiver_store.js +0 -4
  142. package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
  143. package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
  144. package/dest/archiver/archiver_store_test_suite.js +0 -2790
  145. package/dest/archiver/config.d.ts.map +0 -1
  146. package/dest/archiver/errors.d.ts +0 -36
  147. package/dest/archiver/errors.d.ts.map +0 -1
  148. package/dest/archiver/index.d.ts +0 -7
  149. package/dest/archiver/index.d.ts.map +0 -1
  150. package/dest/archiver/index.js +0 -4
  151. package/dest/archiver/instrumentation.d.ts +0 -37
  152. package/dest/archiver/instrumentation.d.ts.map +0 -1
  153. package/dest/archiver/kv_archiver_store/block_store.d.ts +0 -157
  154. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
  155. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
  156. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
  157. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
  158. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
  159. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -158
  160. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
  161. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +0 -313
  162. package/dest/archiver/kv_archiver_store/log_store.d.ts +0 -45
  163. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
  164. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
  165. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
  166. package/dest/archiver/l1/calldata_retriever.d.ts +0 -112
  167. package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
  168. package/dest/archiver/l1/calldata_retriever.js +0 -471
  169. package/dest/archiver/l1/data_retrieval.d.ts +0 -90
  170. package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
  171. package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
  172. package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
  173. package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
  174. package/dest/archiver/l1/types.d.ts +0 -12
  175. package/dest/archiver/l1/types.d.ts.map +0 -1
  176. package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
  177. package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
  178. package/dest/archiver/structs/inbox_message.d.ts +0 -15
  179. package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
  180. package/dest/archiver/structs/published.d.ts.map +0 -1
  181. package/dest/archiver/validation.d.ts +0 -17
  182. package/dest/archiver/validation.d.ts.map +0 -1
  183. package/dest/rpc/index.d.ts +0 -9
  184. package/dest/rpc/index.d.ts.map +0 -1
  185. package/dest/rpc/index.js +0 -15
  186. package/src/archiver/archiver.ts +0 -2157
  187. package/src/archiver/archiver_store.ts +0 -372
  188. package/src/archiver/archiver_store_test_suite.ts +0 -2863
  189. package/src/archiver/index.ts +0 -6
  190. package/src/archiver/l1/README.md +0 -98
  191. package/src/archiver/l1/calldata_retriever.ts +0 -641
  192. package/src/rpc/index.ts +0 -16
  193. /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
  194. /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
  195. /package/dest/{archiver/l1 → l1}/types.js +0 -0
  196. /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
  197. /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
  198. /package/dest/{archiver/structs → structs}/published.js +0 -0
  199. /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
  200. /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
  201. /package/src/{archiver/l1 → l1}/types.ts +0 -0
  202. /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
  203. /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
  204. /package/src/{archiver/structs → structs}/published.ts +0 -0
@@ -1,11 +1,13 @@
1
- import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/constants';
1
+ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
2
2
  import { BlockNumber } from '@aztec/foundation/branded-types';
3
+ import { filterAsync } from '@aztec/foundation/collection';
3
4
  import { Fr } from '@aztec/foundation/curves/bn254';
4
5
  import { createLogger } from '@aztec/foundation/log';
5
6
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
6
7
  import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
7
8
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
8
- import { L2BlockHash, L2BlockNew } from '@aztec/stdlib/block';
9
+ import { BlockHash, L2Block } from '@aztec/stdlib/block';
10
+ import { MAX_LOGS_PER_TAG } from '@aztec/stdlib/interfaces/api-limit';
9
11
  import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
10
12
  import {
11
13
  ContractClassLog,
@@ -18,6 +20,7 @@ import {
18
20
  Tag,
19
21
  TxScopedL2Log,
20
22
  } from '@aztec/stdlib/logs';
23
+ import { TxHash } from '@aztec/stdlib/tx';
21
24
 
22
25
  import type { BlockStore } from './block_store.js';
23
26
 
@@ -57,21 +60,16 @@ export class LogStore {
57
60
  * @param block - The L2 block to extract logs from.
58
61
  * @returns An object containing the private and public tagged logs for the block.
59
62
  */
60
- async #extractTaggedLogsFromBlock(block: L2BlockNew) {
61
- const blockHash = L2BlockHash.fromField(await block.hash());
63
+ #extractTaggedLogsFromBlock(block: L2Block) {
62
64
  // SiloedTag (as string) -> array of log buffers.
63
65
  const privateTaggedLogs = new Map<string, Buffer[]>();
64
66
  // "{contractAddress}_{tag}" (as string) -> array of log buffers.
65
67
  const publicTaggedLogs = new Map<string, Buffer[]>();
66
- const dataStartIndexForBlock =
67
- block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
68
- block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
69
68
 
70
- block.body.txEffects.forEach((txEffect, txIndex) => {
69
+ block.body.txEffects.forEach(txEffect => {
71
70
  const txHash = txEffect.txHash;
72
- const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
73
71
 
74
- txEffect.privateLogs.forEach((log, logIndex) => {
72
+ txEffect.privateLogs.forEach(log => {
75
73
  // Private logs use SiloedTag (already siloed by kernel)
76
74
  const tag = log.fields[0];
77
75
  this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`);
@@ -80,18 +78,17 @@ export class LogStore {
80
78
  currentLogs.push(
81
79
  new TxScopedL2Log(
82
80
  txHash,
83
- dataStartIndexForTx,
84
- logIndex,
85
81
  block.number,
86
- blockHash,
87
82
  block.timestamp,
88
- log,
83
+ log.getEmittedFields(),
84
+ txEffect.noteHashes,
85
+ txEffect.nullifiers[0],
89
86
  ).toBuffer(),
90
87
  );
91
88
  privateTaggedLogs.set(tag.toString(), currentLogs);
92
89
  });
93
90
 
94
- txEffect.publicLogs.forEach((log, logIndex) => {
91
+ txEffect.publicLogs.forEach(log => {
95
92
  // Public logs use Tag directly (not siloed) and are stored with contract address
96
93
  const tag = log.fields[0];
97
94
  const contractAddress = log.contractAddress;
@@ -104,12 +101,11 @@ export class LogStore {
104
101
  currentLogs.push(
105
102
  new TxScopedL2Log(
106
103
  txHash,
107
- dataStartIndexForTx,
108
- logIndex,
109
104
  block.number,
110
- blockHash,
111
105
  block.timestamp,
112
- log,
106
+ log.getEmittedFields(),
107
+ txEffect.noteHashes,
108
+ txEffect.nullifiers[0],
113
109
  ).toBuffer(),
114
110
  );
115
111
  publicTaggedLogs.set(key, currentLogs);
@@ -125,10 +121,11 @@ export class LogStore {
125
121
  * @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from
126
122
  * "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key.
127
123
  */
128
- async #extractTaggedLogs(
129
- blocks: L2BlockNew[],
130
- ): Promise<{ privateTaggedLogs: Map<string, Buffer[]>; publicTaggedLogs: Map<string, Buffer[]> }> {
131
- const taggedLogsInBlocks = await Promise.all(blocks.map(block => this.#extractTaggedLogsFromBlock(block)));
124
+ #extractTaggedLogs(blocks: L2Block[]): {
125
+ privateTaggedLogs: Map<string, Buffer[]>;
126
+ publicTaggedLogs: Map<string, Buffer[]>;
127
+ } {
128
+ const taggedLogsInBlocks = blocks.map(block => this.#extractTaggedLogsFromBlock(block));
132
129
 
133
130
  // Now we merge the maps from each block into a single map.
134
131
  const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs }) => {
@@ -150,111 +147,148 @@ export class LogStore {
150
147
  return { privateTaggedLogs, publicTaggedLogs };
151
148
  }
152
149
 
153
- /**
154
- * Append new logs to the store's list.
155
- * @param blocks - The blocks for which to add the logs.
156
- * @returns True if the operation is successful.
157
- */
158
- async addLogs(blocks: L2BlockNew[]): Promise<boolean> {
159
- const { privateTaggedLogs, publicTaggedLogs } = await this.#extractTaggedLogs(blocks);
150
+ async #addPrivateLogs(blocks: L2Block[]): Promise<void> {
151
+ const newBlocks = await filterAsync(
152
+ blocks,
153
+ async block => !(await this.#privateLogKeysByBlock.hasAsync(block.number)),
154
+ );
160
155
 
156
+ const { privateTaggedLogs } = this.#extractTaggedLogs(newBlocks);
161
157
  const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys());
162
- const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
163
158
 
164
- return this.db.transactionAsync(async () => {
165
- const currentPrivateTaggedLogs = await Promise.all(
166
- keysOfPrivateLogsToUpdate.map(async key => ({
167
- tag: key,
168
- logBuffers: await this.#privateLogsByTag.getAsync(key),
169
- })),
170
- );
171
- currentPrivateTaggedLogs.forEach(taggedLogBuffer => {
172
- if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
173
- privateTaggedLogs.set(
174
- taggedLogBuffer.tag,
175
- taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
176
- );
177
- }
178
- });
159
+ const currentPrivateTaggedLogs = await Promise.all(
160
+ keysOfPrivateLogsToUpdate.map(async key => ({
161
+ tag: key,
162
+ logBuffers: await this.#privateLogsByTag.getAsync(key),
163
+ })),
164
+ );
179
165
 
180
- const currentPublicTaggedLogs = await Promise.all(
181
- keysOfPublicLogsToUpdate.map(async key => ({
182
- key,
183
- logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
184
- })),
185
- );
186
- currentPublicTaggedLogs.forEach(taggedLogBuffer => {
187
- if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
188
- publicTaggedLogs.set(
189
- taggedLogBuffer.key,
190
- taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.key)!),
191
- );
192
- }
193
- });
166
+ for (const taggedLogBuffer of currentPrivateTaggedLogs) {
167
+ if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
168
+ privateTaggedLogs.set(
169
+ taggedLogBuffer.tag,
170
+ taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
171
+ );
172
+ }
173
+ }
194
174
 
195
- for (const block of blocks) {
196
- const blockHash = await block.hash();
175
+ for (const block of newBlocks) {
176
+ const privateTagsInBlock: string[] = [];
177
+ for (const [tag, logs] of privateTaggedLogs.entries()) {
178
+ await this.#privateLogsByTag.set(tag, logs);
179
+ privateTagsInBlock.push(tag);
180
+ }
181
+ await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
182
+ }
183
+ }
197
184
 
198
- const privateTagsInBlock: string[] = [];
199
- for (const [tag, logs] of privateTaggedLogs.entries()) {
200
- await this.#privateLogsByTag.set(tag, logs);
201
- privateTagsInBlock.push(tag);
202
- }
203
- await this.#privateLogKeysByBlock.set(block.number, privateTagsInBlock);
185
+ async #addPublicLogs(blocks: L2Block[]): Promise<void> {
186
+ const newBlocks = await filterAsync(
187
+ blocks,
188
+ async block => !(await this.#publicLogKeysByBlock.hasAsync(block.number)),
189
+ );
204
190
 
205
- const publicKeysInBlock: string[] = [];
206
- for (const [key, logs] of publicTaggedLogs.entries()) {
207
- await this.#publicLogsByContractAndTag.set(key, logs);
208
- publicKeysInBlock.push(key);
209
- }
210
- await this.#publicLogKeysByBlock.set(block.number, publicKeysInBlock);
211
-
212
- const publicLogsInBlock = block.body.txEffects
213
- .map((txEffect, txIndex) =>
214
- [
215
- numToUInt32BE(txIndex),
216
- numToUInt32BE(txEffect.publicLogs.length),
217
- txEffect.publicLogs.map(log => log.toBuffer()),
218
- ].flat(),
219
- )
220
- .flat();
221
-
222
- const contractClassLogsInBlock = block.body.txEffects
223
- .map((txEffect, txIndex) =>
224
- [
225
- numToUInt32BE(txIndex),
226
- numToUInt32BE(txEffect.contractClassLogs.length),
227
- txEffect.contractClassLogs.map(log => log.toBuffer()),
228
- ].flat(),
229
- )
230
- .flat();
231
-
232
- await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
233
- await this.#contractClassLogsByBlock.set(
234
- block.number,
235
- this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
191
+ const { publicTaggedLogs } = this.#extractTaggedLogs(newBlocks);
192
+ const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys());
193
+
194
+ const currentPublicTaggedLogs = await Promise.all(
195
+ keysOfPublicLogsToUpdate.map(async key => ({
196
+ tag: key,
197
+ logBuffers: await this.#publicLogsByContractAndTag.getAsync(key),
198
+ })),
199
+ );
200
+
201
+ for (const taggedLogBuffer of currentPublicTaggedLogs) {
202
+ if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
203
+ publicTaggedLogs.set(
204
+ taggedLogBuffer.tag,
205
+ taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
236
206
  );
237
207
  }
208
+ }
209
+
210
+ for (const block of newBlocks) {
211
+ const blockHash = await block.hash();
212
+ const publicTagsInBlock: string[] = [];
213
+ for (const [tag, logs] of publicTaggedLogs.entries()) {
214
+ await this.#publicLogsByContractAndTag.set(tag, logs);
215
+ publicTagsInBlock.push(tag);
216
+ }
217
+ await this.#publicLogKeysByBlock.set(block.number, publicTagsInBlock);
218
+
219
+ const publicLogsInBlock = block.body.txEffects
220
+ .map((txEffect, txIndex) =>
221
+ [
222
+ numToUInt32BE(txIndex),
223
+ txEffect.txHash.toBuffer(),
224
+ numToUInt32BE(txEffect.publicLogs.length),
225
+ txEffect.publicLogs.map(log => log.toBuffer()),
226
+ ].flat(),
227
+ )
228
+ .flat();
229
+
230
+ await this.#publicLogsByBlock.set(block.number, this.#packWithBlockHash(blockHash, publicLogsInBlock));
231
+ }
232
+ }
233
+
234
+ async #addContractClassLogs(blocks: L2Block[]): Promise<void> {
235
+ const newBlocks = await filterAsync(
236
+ blocks,
237
+ async block => !(await this.#contractClassLogsByBlock.hasAsync(block.number)),
238
+ );
239
+
240
+ for (const block of newBlocks) {
241
+ const blockHash = await block.hash();
242
+
243
+ const contractClassLogsInBlock = block.body.txEffects
244
+ .map((txEffect, txIndex) =>
245
+ [
246
+ numToUInt32BE(txIndex),
247
+ txEffect.txHash.toBuffer(),
248
+ numToUInt32BE(txEffect.contractClassLogs.length),
249
+ txEffect.contractClassLogs.map(log => log.toBuffer()),
250
+ ].flat(),
251
+ )
252
+ .flat();
253
+
254
+ await this.#contractClassLogsByBlock.set(
255
+ block.number,
256
+ this.#packWithBlockHash(blockHash, contractClassLogsInBlock),
257
+ );
258
+ }
259
+ }
238
260
 
261
+ /**
262
+ * Append new logs to the store's list.
263
+ * @param blocks - The blocks for which to add the logs.
264
+ * @returns True if the operation is successful.
265
+ */
266
+ addLogs(blocks: L2Block[]): Promise<boolean> {
267
+ return this.db.transactionAsync(async () => {
268
+ await Promise.all([
269
+ this.#addPrivateLogs(blocks),
270
+ this.#addPublicLogs(blocks),
271
+ this.#addContractClassLogs(blocks),
272
+ ]);
239
273
  return true;
240
274
  });
241
275
  }
242
276
 
243
- #packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
277
+ #packWithBlockHash(blockHash: BlockHash, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
244
278
  return Buffer.concat([blockHash.toBuffer(), ...data]);
245
279
  }
246
280
 
247
- #unpackBlockHash(reader: BufferReader): L2BlockHash {
281
+ #unpackBlockHash(reader: BufferReader): BlockHash {
248
282
  const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
249
283
 
250
284
  if (!blockHash) {
251
285
  throw new Error('Failed to read block hash from log entry buffer');
252
286
  }
253
287
 
254
- return L2BlockHash.fromField(blockHash);
288
+ return new BlockHash(blockHash);
255
289
  }
256
290
 
257
- deleteLogs(blocks: L2BlockNew[]): Promise<boolean> {
291
+ deleteLogs(blocks: L2Block[]): Promise<boolean> {
258
292
  return this.db.transactionAsync(async () => {
259
293
  await Promise.all(
260
294
  blocks.map(async block => {
@@ -284,27 +318,49 @@ export class LogStore {
284
318
  }
285
319
 
286
320
  /**
287
- * Gets all private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
321
+ * Gets private logs that match any of the `tags`. For each tag, an array of matching logs is returned. An empty
288
322
  * array implies no logs match that tag.
323
+ * @param tags - The tags to search for.
324
+ * @param page - The page number (0-indexed) for pagination.
325
+ * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
326
+ * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
289
327
  */
290
- async getPrivateLogsByTags(tags: SiloedTag[]): Promise<TxScopedL2Log[][]> {
328
+ async getPrivateLogsByTags(tags: SiloedTag[], page: number = 0): Promise<TxScopedL2Log[][]> {
291
329
  const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
330
+ const start = page * MAX_LOGS_PER_TAG;
331
+ const end = start + MAX_LOGS_PER_TAG;
292
332
 
293
- return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
333
+ return logs.map(
334
+ logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
335
+ );
294
336
  }
295
337
 
296
338
  /**
297
- * Gets all public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
339
+ * Gets public logs that match any of the `tags` from the specified contract. For each tag, an array of matching
298
340
  * logs is returned. An empty array implies no logs match that tag.
341
+ * @param contractAddress - The contract address to search logs for.
342
+ * @param tags - The tags to search for.
343
+ * @param page - The page number (0-indexed) for pagination.
344
+ * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
345
+ * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
299
346
  */
300
- async getPublicLogsByTagsFromContract(contractAddress: AztecAddress, tags: Tag[]): Promise<TxScopedL2Log[][]> {
347
+ async getPublicLogsByTagsFromContract(
348
+ contractAddress: AztecAddress,
349
+ tags: Tag[],
350
+ page: number = 0,
351
+ ): Promise<TxScopedL2Log[][]> {
301
352
  const logs = await Promise.all(
302
353
  tags.map(tag => {
303
354
  const key = `${contractAddress.toString()}_${tag.value.toString()}`;
304
355
  return this.#publicLogsByContractAndTag.getAsync(key);
305
356
  }),
306
357
  );
307
- return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []);
358
+ const start = page * MAX_LOGS_PER_TAG;
359
+ const end = start + MAX_LOGS_PER_TAG;
360
+
361
+ return logs.map(
362
+ logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
363
+ );
308
364
  }
309
365
 
310
366
  /**
@@ -333,24 +389,33 @@ export class LogStore {
333
389
  }
334
390
 
335
391
  const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
336
- const publicLogsInBlock: [PublicLog[]] = [[]];
392
+ const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
337
393
  const reader = new BufferReader(buffer);
338
394
 
339
395
  const blockHash = this.#unpackBlockHash(reader);
340
396
 
341
397
  while (reader.remainingBytes() > 0) {
342
398
  const indexOfTx = reader.readNumber();
399
+ const txHash = reader.readObject(TxHash);
343
400
  const numLogsInTx = reader.readNumber();
344
- publicLogsInBlock[indexOfTx] = [];
401
+ publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
345
402
  for (let i = 0; i < numLogsInTx; i++) {
346
- publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
403
+ publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
347
404
  }
348
405
  }
349
406
 
350
- const txLogs = publicLogsInBlock[txIndex];
407
+ const txData = publicLogsInBlock[txIndex];
351
408
 
352
409
  const logs: ExtendedPublicLog[] = [];
353
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
410
+ const maxLogsHit = this.#accumulatePublicLogs(
411
+ logs,
412
+ blockNumber,
413
+ blockHash,
414
+ txIndex,
415
+ txData.txHash,
416
+ txData.logs,
417
+ filter,
418
+ );
354
419
 
355
420
  return { logs, maxLogsHit };
356
421
  }
@@ -371,22 +436,31 @@ export class LogStore {
371
436
 
372
437
  let maxLogsHit = false;
373
438
  loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
374
- const publicLogsInBlock: [PublicLog[]] = [[]];
439
+ const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
375
440
  const reader = new BufferReader(logBuffer);
376
441
 
377
442
  const blockHash = this.#unpackBlockHash(reader);
378
443
 
379
444
  while (reader.remainingBytes() > 0) {
380
445
  const indexOfTx = reader.readNumber();
446
+ const txHash = reader.readObject(TxHash);
381
447
  const numLogsInTx = reader.readNumber();
382
- publicLogsInBlock[indexOfTx] = [];
448
+ publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
383
449
  for (let i = 0; i < numLogsInTx; i++) {
384
- publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
450
+ publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
385
451
  }
386
452
  }
387
453
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
388
- const txLogs = publicLogsInBlock[txIndex];
389
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
454
+ const txData = publicLogsInBlock[txIndex];
455
+ maxLogsHit = this.#accumulatePublicLogs(
456
+ logs,
457
+ blockNumber,
458
+ blockHash,
459
+ txIndex,
460
+ txData.txHash,
461
+ txData.logs,
462
+ filter,
463
+ );
390
464
  if (maxLogsHit) {
391
465
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
392
466
  break loopOverBlocks;
@@ -422,24 +496,33 @@ export class LogStore {
422
496
  return { logs: [], maxLogsHit: false };
423
497
  }
424
498
  const contractClassLogsBuffer = (await this.#contractClassLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
425
- const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
499
+ const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
426
500
 
427
501
  const reader = new BufferReader(contractClassLogsBuffer);
428
502
  const blockHash = this.#unpackBlockHash(reader);
429
503
 
430
504
  while (reader.remainingBytes() > 0) {
431
505
  const indexOfTx = reader.readNumber();
506
+ const txHash = reader.readObject(TxHash);
432
507
  const numLogsInTx = reader.readNumber();
433
- contractClassLogsInBlock[indexOfTx] = [];
508
+ contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
434
509
  for (let i = 0; i < numLogsInTx; i++) {
435
- contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
510
+ contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
436
511
  }
437
512
  }
438
513
 
439
- const txLogs = contractClassLogsInBlock[txIndex];
514
+ const txData = contractClassLogsInBlock[txIndex];
440
515
 
441
516
  const logs: ExtendedContractClassLog[] = [];
442
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
517
+ const maxLogsHit = this.#accumulateContractClassLogs(
518
+ logs,
519
+ blockNumber,
520
+ blockHash,
521
+ txIndex,
522
+ txData.txHash,
523
+ txData.logs,
524
+ filter,
525
+ );
443
526
 
444
527
  return { logs, maxLogsHit };
445
528
  }
@@ -463,20 +546,29 @@ export class LogStore {
463
546
  start,
464
547
  end,
465
548
  })) {
466
- const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
549
+ const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
467
550
  const reader = new BufferReader(logBuffer);
468
551
  const blockHash = this.#unpackBlockHash(reader);
469
552
  while (reader.remainingBytes() > 0) {
470
553
  const indexOfTx = reader.readNumber();
554
+ const txHash = reader.readObject(TxHash);
471
555
  const numLogsInTx = reader.readNumber();
472
- contractClassLogsInBlock[indexOfTx] = [];
556
+ contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
473
557
  for (let i = 0; i < numLogsInTx; i++) {
474
- contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
558
+ contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
475
559
  }
476
560
  }
477
561
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
478
- const txLogs = contractClassLogsInBlock[txIndex];
479
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
562
+ const txData = contractClassLogsInBlock[txIndex];
563
+ maxLogsHit = this.#accumulateContractClassLogs(
564
+ logs,
565
+ blockNumber,
566
+ blockHash,
567
+ txIndex,
568
+ txData.txHash,
569
+ txData.logs,
570
+ filter,
571
+ );
480
572
  if (maxLogsHit) {
481
573
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
482
574
  break loopOverBlocks;
@@ -487,12 +579,13 @@ export class LogStore {
487
579
  return { logs, maxLogsHit };
488
580
  }
489
581
 
490
- #accumulateLogs(
491
- results: (ExtendedContractClassLog | ExtendedPublicLog)[],
582
+ #accumulatePublicLogs(
583
+ results: ExtendedPublicLog[],
492
584
  blockNumber: number,
493
- blockHash: L2BlockHash,
585
+ blockHash: BlockHash,
494
586
  txIndex: number,
495
- txLogs: (ContractClassLog | PublicLog)[],
587
+ txHash: TxHash,
588
+ txLogs: PublicLog[],
496
589
  filter: LogFilter = {},
497
590
  ): boolean {
498
591
  let maxLogsHit = false;
@@ -500,15 +593,37 @@ export class LogStore {
500
593
  for (; logIndex < txLogs.length; logIndex++) {
501
594
  const log = txLogs[logIndex];
502
595
  if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
503
- if (log instanceof ContractClassLog) {
504
- results.push(
505
- new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log),
506
- );
507
- } else if (log instanceof PublicLog) {
508
- results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
509
- } else {
510
- throw new Error('Unknown log type');
596
+ results.push(
597
+ new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
598
+ );
599
+
600
+ if (results.length >= this.#logsMaxPageSize) {
601
+ maxLogsHit = true;
602
+ break;
511
603
  }
604
+ }
605
+ }
606
+
607
+ return maxLogsHit;
608
+ }
609
+
610
+ #accumulateContractClassLogs(
611
+ results: ExtendedContractClassLog[],
612
+ blockNumber: number,
613
+ blockHash: BlockHash,
614
+ txIndex: number,
615
+ txHash: TxHash,
616
+ txLogs: ContractClassLog[],
617
+ filter: LogFilter = {},
618
+ ): boolean {
619
+ let maxLogsHit = false;
620
+ let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
621
+ for (; logIndex < txLogs.length; logIndex++) {
622
+ const log = txLogs[logIndex];
623
+ if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
624
+ results.push(
625
+ new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
626
+ );
512
627
 
513
628
  if (results.length >= this.#logsMaxPageSize) {
514
629
  maxLogsHit = true;
@@ -137,7 +137,7 @@ export class MessageStore {
137
137
  );
138
138
  }
139
139
 
140
- // Check the first message in a block has the correct index.
140
+ // Check the first message in a checkpoint has the correct index.
141
141
  if (
142
142
  (!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) &&
143
143
  message.index !== expectedStart