@aztec/archiver 0.0.0-test.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106) hide show
  1. package/README.md +16 -0
  2. package/dest/archiver/archiver.d.ts +197 -0
  3. package/dest/archiver/archiver.d.ts.map +1 -0
  4. package/dest/archiver/archiver.js +900 -0
  5. package/dest/archiver/archiver_store.d.ts +220 -0
  6. package/dest/archiver/archiver_store.d.ts.map +1 -0
  7. package/dest/archiver/archiver_store.js +4 -0
  8. package/dest/archiver/archiver_store_test_suite.d.ts +8 -0
  9. package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -0
  10. package/dest/archiver/archiver_store_test_suite.js +794 -0
  11. package/dest/archiver/config.d.ts +37 -0
  12. package/dest/archiver/config.d.ts.map +1 -0
  13. package/dest/archiver/config.js +46 -0
  14. package/dest/archiver/data_retrieval.d.ts +74 -0
  15. package/dest/archiver/data_retrieval.d.ts.map +1 -0
  16. package/dest/archiver/data_retrieval.js +283 -0
  17. package/dest/archiver/errors.d.ts +4 -0
  18. package/dest/archiver/errors.d.ts.map +1 -0
  19. package/dest/archiver/errors.js +5 -0
  20. package/dest/archiver/index.d.ts +8 -0
  21. package/dest/archiver/index.d.ts.map +1 -0
  22. package/dest/archiver/index.js +5 -0
  23. package/dest/archiver/instrumentation.d.ts +29 -0
  24. package/dest/archiver/instrumentation.d.ts.map +1 -0
  25. package/dest/archiver/instrumentation.js +99 -0
  26. package/dest/archiver/kv_archiver_store/block_store.d.ts +87 -0
  27. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -0
  28. package/dest/archiver/kv_archiver_store/block_store.js +217 -0
  29. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +18 -0
  30. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -0
  31. package/dest/archiver/kv_archiver_store/contract_class_store.js +126 -0
  32. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +21 -0
  33. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -0
  34. package/dest/archiver/kv_archiver_store/contract_instance_store.js +63 -0
  35. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +153 -0
  36. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -0
  37. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +254 -0
  38. package/dest/archiver/kv_archiver_store/log_store.d.ts +49 -0
  39. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -0
  40. package/dest/archiver/kv_archiver_store/log_store.js +364 -0
  41. package/dest/archiver/kv_archiver_store/message_store.d.ts +33 -0
  42. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -0
  43. package/dest/archiver/kv_archiver_store/message_store.js +85 -0
  44. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts +12 -0
  45. package/dest/archiver/kv_archiver_store/nullifier_store.d.ts.map +1 -0
  46. package/dest/archiver/kv_archiver_store/nullifier_store.js +73 -0
  47. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts +23 -0
  48. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts.map +1 -0
  49. package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.js +49 -0
  50. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +175 -0
  51. package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +1 -0
  52. package/dest/archiver/memory_archiver_store/memory_archiver_store.js +636 -0
  53. package/dest/archiver/structs/data_retrieval.d.ts +27 -0
  54. package/dest/archiver/structs/data_retrieval.d.ts.map +1 -0
  55. package/dest/archiver/structs/data_retrieval.js +5 -0
  56. package/dest/archiver/structs/published.d.ts +11 -0
  57. package/dest/archiver/structs/published.d.ts.map +1 -0
  58. package/dest/archiver/structs/published.js +1 -0
  59. package/dest/factory.d.ts +24 -0
  60. package/dest/factory.d.ts.map +1 -0
  61. package/dest/factory.js +85 -0
  62. package/dest/index.d.ts +5 -0
  63. package/dest/index.d.ts.map +1 -0
  64. package/dest/index.js +4 -0
  65. package/dest/rpc/index.d.ts +10 -0
  66. package/dest/rpc/index.d.ts.map +1 -0
  67. package/dest/rpc/index.js +18 -0
  68. package/dest/test/index.d.ts +4 -0
  69. package/dest/test/index.d.ts.map +1 -0
  70. package/dest/test/index.js +3 -0
  71. package/dest/test/mock_archiver.d.ts +23 -0
  72. package/dest/test/mock_archiver.d.ts.map +1 -0
  73. package/dest/test/mock_archiver.js +40 -0
  74. package/dest/test/mock_l1_to_l2_message_source.d.ts +16 -0
  75. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -0
  76. package/dest/test/mock_l1_to_l2_message_source.js +25 -0
  77. package/dest/test/mock_l2_block_source.d.ts +79 -0
  78. package/dest/test/mock_l2_block_source.d.ts.map +1 -0
  79. package/dest/test/mock_l2_block_source.js +163 -0
  80. package/package.json +110 -0
  81. package/src/archiver/archiver.ts +1181 -0
  82. package/src/archiver/archiver_store.ts +263 -0
  83. package/src/archiver/archiver_store_test_suite.ts +810 -0
  84. package/src/archiver/config.ts +92 -0
  85. package/src/archiver/data_retrieval.ts +422 -0
  86. package/src/archiver/errors.ts +5 -0
  87. package/src/archiver/index.ts +7 -0
  88. package/src/archiver/instrumentation.ts +132 -0
  89. package/src/archiver/kv_archiver_store/block_store.ts +283 -0
  90. package/src/archiver/kv_archiver_store/contract_class_store.ts +186 -0
  91. package/src/archiver/kv_archiver_store/contract_instance_store.ts +107 -0
  92. package/src/archiver/kv_archiver_store/kv_archiver_store.ts +358 -0
  93. package/src/archiver/kv_archiver_store/log_store.ts +444 -0
  94. package/src/archiver/kv_archiver_store/message_store.ts +102 -0
  95. package/src/archiver/kv_archiver_store/nullifier_store.ts +97 -0
  96. package/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +61 -0
  97. package/src/archiver/memory_archiver_store/memory_archiver_store.ts +801 -0
  98. package/src/archiver/structs/data_retrieval.ts +27 -0
  99. package/src/archiver/structs/published.ts +11 -0
  100. package/src/factory.ts +107 -0
  101. package/src/index.ts +5 -0
  102. package/src/rpc/index.ts +20 -0
  103. package/src/test/index.ts +3 -0
  104. package/src/test/mock_archiver.ts +57 -0
  105. package/src/test/mock_l1_to_l2_message_source.ts +31 -0
  106. package/src/test/mock_l2_block_source.ts +204 -0
@@ -0,0 +1,364 @@
1
+ import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, PUBLIC_LOG_DATA_SIZE_IN_FIELDS } from '@aztec/constants';
2
+ import { createLogger } from '@aztec/foundation/log';
3
+ import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
4
+ import { ContractClassLog, ExtendedContractClassLog, ExtendedPublicLog, LogId, PrivateLog, PublicLog, TxScopedL2Log } from '@aztec/stdlib/logs';
5
/**
 * A store for logs
 */
export class LogStore {
  db;
  blockStore;
  #logsByTag;
  #logTagsByBlock;
  #privateLogsByBlock;
  #publicLogsByBlock;
  #contractClassLogsByBlock;
  #logsMaxPageSize;
  #log;

  /**
   * @param db - The async KV store backing all log maps.
   * @param blockStore - Used to resolve a tx hash to its (blockNumber, txIndex) location.
   * @param logsMaxPageSize - Maximum number of logs returned per filter query page.
   */
  constructor(db, blockStore, logsMaxPageSize = 1000) {
    this.db = db;
    this.blockStore = blockStore;
    this.#log = createLogger('archiver:log_store');
    this.#logsByTag = db.openMap('archiver_tagged_logs_by_tag');
    this.#logTagsByBlock = db.openMap('archiver_log_tags_by_block');
    this.#privateLogsByBlock = db.openMap('archiver_private_logs_by_block');
    this.#publicLogsByBlock = db.openMap('archiver_public_logs_by_block');
    this.#contractClassLogsByBlock = db.openMap('archiver_contract_class_logs_by_block');
    this.#logsMaxPageSize = logsMaxPageSize;
  }

  /**
   * Collects all private logs of a block keyed by their tag (the first log field).
   * @param block - The block to scan.
   * @returns Map from tag string to serialized TxScopedL2Log buffers.
   */
  #extractTaggedLogsFromPrivate(block) {
    const taggedLogs = new Map();
    // The note hash tree's next available leaf index minus the block's note hash capacity
    // gives the first data index of this block.
    const dataStartIndexForBlock =
      block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
      block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
    block.body.txEffects.forEach((txEffect, txIndex) => {
      const txHash = txEffect.txHash;
      const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
      txEffect.privateLogs.forEach(log => {
        const tag = log.fields[0];
        const currentLogs = taggedLogs.get(tag.toString()) ?? [];
        currentLogs.push(
          new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ false, log.toBuffer()).toBuffer(),
        );
        taggedLogs.set(tag.toString(), currentLogs);
      });
    });
    return taggedLogs;
  }

  /**
   * Collects the tagged public logs of a block keyed by their tag (second log field).
   * Public logs that do not match the tagged-log encoding are skipped with a warning.
   * @param block - The block to scan.
   * @returns Map from tag string to serialized TxScopedL2Log buffers.
   */
  #extractTaggedLogsFromPublic(block) {
    const taggedLogs = new Map();
    const dataStartIndexForBlock =
      block.header.state.partial.noteHashTree.nextAvailableLeafIndex -
      block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX;
    block.body.txEffects.forEach((txEffect, txIndex) => {
      const txHash = txEffect.txHash;
      const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX;
      txEffect.publicLogs.forEach(log => {
        // Check that each log stores 2 lengths in its first field. If not, it's not a tagged log:
        const firstFieldBuf = log.log[0].toBuffer();
        // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me).
        // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here.
        if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) {
          // See parseLogFromPublic - the first field of a tagged log is 5 bytes structured:
          // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]]
          this.#log.warn(`Skipping public log with invalid first field: ${log.log[0]}`);
          return;
        }
        // Check that the length values line up with the log contents
        const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE();
        const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3);
        // Add 1 for the first field holding lengths
        const totalLogLength = 1 + publicValuesLength + privateValuesLength;
        // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length
        if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) {
          this.#log.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`);
          return;
        }
        // The first elt stores lengths as above => tag is in fields[1]
        const tag = log.log[1];
        this.#log.debug(`Found tagged public log with tag ${tag.toString()} in block ${block.number}`);
        const currentLogs = taggedLogs.get(tag.toString()) ?? [];
        currentLogs.push(
          new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ true, log.toBuffer()).toBuffer(),
        );
        taggedLogs.set(tag.toString(), currentLogs);
      });
    });
    return taggedLogs;
  }

  /**
   * Append new logs to the store's list.
   * @param blocks - The blocks for which to add the logs.
   * @returns True if the operation is successful.
   */
  addLogs(blocks) {
    const taggedLogsToAdd = blocks
      .flatMap(block => [this.#extractTaggedLogsFromPrivate(block), this.#extractTaggedLogsFromPublic(block)])
      // Fix: seed the reduce with an empty Map. Without an initial value, addLogs([])
      // would throw "Reduce of empty array with no initial value".
      .reduce((acc, val) => {
        for (const [tag, logs] of val.entries()) {
          const currentLogs = acc.get(tag) ?? [];
          acc.set(tag, currentLogs.concat(logs));
        }
        return acc;
      }, new Map());
    const tagsToUpdate = Array.from(taggedLogsToAdd.keys());
    return this.db.transactionAsync(async () => {
      // Prepend logs already stored under each tag, so existing entries are preserved.
      const currentTaggedLogs = await Promise.all(
        tagsToUpdate.map(async tag => ({
          tag,
          logBuffers: await this.#logsByTag.getAsync(tag),
        })),
      );
      currentTaggedLogs.forEach(taggedLogBuffer => {
        if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
          taggedLogsToAdd.set(
            taggedLogBuffer.tag,
            taggedLogBuffer.logBuffers.concat(taggedLogsToAdd.get(taggedLogBuffer.tag)),
          );
        }
      });
      for (const block of blocks) {
        const tagsInBlock = [];
        // NOTE(review): this records EVERY tag of the whole batch against EACH block,
        // not only the tags occurring in this particular block, and rewrites #logsByTag
        // once per block. Confirm this is intended before relying on #logTagsByBlock
        // for per-block tag lookups; behavior is preserved here.
        for (const [tag, logs] of taggedLogsToAdd.entries()) {
          await this.#logsByTag.set(tag, logs);
          tagsInBlock.push(tag);
        }
        await this.#logTagsByBlock.set(block.number, tagsInBlock);
        // Private logs are stored as one concatenated buffer per block.
        const privateLogsInBlock = block.body.txEffects
          .map(txEffect => txEffect.privateLogs)
          .flat()
          .map(log => log.toBuffer());
        await this.#privateLogsByBlock.set(block.number, Buffer.concat(privateLogsInBlock));
        // Public and contract class logs are stored per block as repeated
        // [txIndex, numLogsInTx, ...serialized logs] runs.
        const publicLogsInBlock = block.body.txEffects
          .map((txEffect, txIndex) =>
            [
              numToUInt32BE(txIndex),
              numToUInt32BE(txEffect.publicLogs.length),
              txEffect.publicLogs.map(log => log.toBuffer()),
            ].flat(),
          )
          .flat();
        const contractClassLogsInBlock = block.body.txEffects
          .map((txEffect, txIndex) =>
            [
              numToUInt32BE(txIndex),
              numToUInt32BE(txEffect.contractClassLogs.length),
              txEffect.contractClassLogs.map(log => log.toBuffer()),
            ].flat(),
          )
          .flat();
        await this.#publicLogsByBlock.set(block.number, Buffer.concat(publicLogsInBlock));
        await this.#contractClassLogsByBlock.set(block.number, Buffer.concat(contractClassLogsInBlock));
      }
      return true;
    });
  }

  /**
   * Deletes all stored logs (tagged, private, public and contract class) for the given blocks.
   * @param blocks - The blocks whose logs should be removed.
   * @returns True if the operation is successful.
   */
  deleteLogs(blocks) {
    return this.db.transactionAsync(async () => {
      const tagsToDelete = (
        await Promise.all(
          blocks.map(async block => {
            const tags = await this.#logTagsByBlock.getAsync(block.number);
            return tags ?? [];
          }),
        )
      ).flat();
      await Promise.all(
        blocks.map(block =>
          Promise.all([
            this.#privateLogsByBlock.delete(block.number),
            this.#publicLogsByBlock.delete(block.number),
            // Fix: contract class logs were previously never deleted here, leaking
            // stale per-block data after a block was removed (e.g. on a reorg).
            this.#contractClassLogsByBlock.delete(block.number),
            this.#logTagsByBlock.delete(block.number),
          ]),
        ),
      );
      await Promise.all(tagsToDelete.map(tag => this.#logsByTag.delete(tag.toString())));
      return true;
    });
  }

  /**
   * Retrieves all private logs from up to `limit` blocks, starting from the block number `start`.
   * @param start - The block number from which to begin retrieving logs.
   * @param limit - The maximum number of blocks to retrieve logs from.
   * @returns An array of private logs from the specified range of blocks.
   */
  async getPrivateLogs(start, limit) {
    const logs = [];
    for await (const buffer of this.#privateLogsByBlock.valuesAsync({ start, limit })) {
      // Each block's value is a concatenation of serialized PrivateLogs; read them all back.
      const reader = new BufferReader(buffer);
      while (reader.remainingBytes() > 0) {
        logs.push(reader.readObject(PrivateLog));
      }
    }
    return logs;
  }

  /**
   * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
   * @param tags - The tags to filter the logs by.
   * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
   * that tag.
   */
  async getLogsByTags(tags) {
    const logs = await Promise.all(tags.map(tag => this.#logsByTag.getAsync(tag.toString())));
    return logs.map(noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? []);
  }

  /**
   * Gets public logs based on the provided filter.
   * Note that `afterLog` takes precedence over `txHash` when both are set.
   * @param filter - The filter to apply to the logs.
   * @returns The requested logs.
   */
  getPublicLogs(filter) {
    if (filter.afterLog) {
      return this.#filterPublicLogsBetweenBlocks(filter);
    } else if (filter.txHash) {
      return this.#filterPublicLogsOfTx(filter);
    } else {
      return this.#filterPublicLogsBetweenBlocks(filter);
    }
  }

  /**
   * Returns the public logs of the single tx identified by `filter.txHash`.
   * @throws If the filter has no txHash.
   */
  async #filterPublicLogsOfTx(filter) {
    if (!filter.txHash) {
      throw new Error('Missing txHash');
    }
    const [blockNumber, txIndex] = (await this.blockStore.getTxLocation(filter.txHash)) ?? [];
    if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
      // Unknown tx: no logs to return.
      return { logs: [], maxLogsHit: false };
    }
    const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
    // Decode the per-block [txIndex, numLogs, ...logs] runs into an array indexed by tx.
    const publicLogsInBlock = [[]];
    const reader = new BufferReader(buffer);
    while (reader.remainingBytes() > 0) {
      const indexOfTx = reader.readNumber();
      const numLogsInTx = reader.readNumber();
      publicLogsInBlock[indexOfTx] = [];
      for (let i = 0; i < numLogsInTx; i++) {
        publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
      }
    }
    const txLogs = publicLogsInBlock[txIndex];
    const logs = [];
    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
    return { logs, maxLogsHit };
  }

  /**
   * Returns public logs for a block range, honoring `fromBlock`/`toBlock`/`afterLog`
   * and capping the result at the configured page size.
   */
  async #filterPublicLogsBetweenBlocks(filter) {
    const start = filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
    const end = filter.toBlock;
    if (typeof end === 'number' && end < start) {
      return { logs: [], maxLogsHit: true };
    }
    const logs = [];
    let maxLogsHit = false;
    loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
      const publicLogsInBlock = [[]];
      const reader = new BufferReader(logBuffer);
      while (reader.remainingBytes() > 0) {
        const indexOfTx = reader.readNumber();
        const numLogsInTx = reader.readNumber();
        publicLogsInBlock[indexOfTx] = [];
        for (let i = 0; i < numLogsInTx; i++) {
          publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
        }
      }
      // NOTE(review): the afterLog tx/log offsets are applied to EVERY block in the
      // range, not only the first one — confirm whether later blocks should instead
      // start from txIndex 0; behavior is preserved here.
      for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
        const txLogs = publicLogsInBlock[txIndex];
        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
        if (maxLogsHit) {
          this.#log.debug(`Max logs hit at block ${blockNumber}`);
          break loopOverBlocks;
        }
      }
    }
    return { logs, maxLogsHit };
  }

  /**
   * Gets contract class logs based on the provided filter.
   * Note that `afterLog` takes precedence over `txHash` when both are set.
   * @param filter - The filter to apply to the logs.
   * @returns The requested logs.
   */
  getContractClassLogs(filter) {
    if (filter.afterLog) {
      return this.#filterContractClassLogsBetweenBlocks(filter);
    } else if (filter.txHash) {
      return this.#filterContractClassLogsOfTx(filter);
    } else {
      return this.#filterContractClassLogsBetweenBlocks(filter);
    }
  }

  /**
   * Returns the contract class logs of the single tx identified by `filter.txHash`.
   * @throws If the filter has no txHash.
   */
  async #filterContractClassLogsOfTx(filter) {
    if (!filter.txHash) {
      throw new Error('Missing txHash');
    }
    const [blockNumber, txIndex] = (await this.blockStore.getTxLocation(filter.txHash)) ?? [];
    if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') {
      return { logs: [], maxLogsHit: false };
    }
    const contractClassLogsBuffer = (await this.#contractClassLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
    // Decode the per-block [txIndex, numLogs, ...logs] runs into an array indexed by tx.
    const contractClassLogsInBlock = [[]];
    const reader = new BufferReader(contractClassLogsBuffer);
    while (reader.remainingBytes() > 0) {
      const indexOfTx = reader.readNumber();
      const numLogsInTx = reader.readNumber();
      contractClassLogsInBlock[indexOfTx] = [];
      for (let i = 0; i < numLogsInTx; i++) {
        contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
      }
    }
    const txLogs = contractClassLogsInBlock[txIndex];
    const logs = [];
    const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
    return { logs, maxLogsHit };
  }

  /**
   * Returns contract class logs for a block range, honoring `fromBlock`/`toBlock`/`afterLog`
   * and capping the result at the configured page size.
   */
  async #filterContractClassLogsBetweenBlocks(filter) {
    const start = filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
    const end = filter.toBlock;
    if (typeof end === 'number' && end < start) {
      return { logs: [], maxLogsHit: true };
    }
    const logs = [];
    let maxLogsHit = false;
    loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#contractClassLogsByBlock.entriesAsync({ start, end })) {
      const contractClassLogsInBlock = [[]];
      const reader = new BufferReader(logBuffer);
      while (reader.remainingBytes() > 0) {
        const indexOfTx = reader.readNumber();
        const numLogsInTx = reader.readNumber();
        contractClassLogsInBlock[indexOfTx] = [];
        for (let i = 0; i < numLogsInTx; i++) {
          contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
        }
      }
      for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
        const txLogs = contractClassLogsInBlock[txIndex];
        maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
        if (maxLogsHit) {
          this.#log.debug(`Max logs hit at block ${blockNumber}`);
          break loopOverBlocks;
        }
      }
    }
    return { logs, maxLogsHit };
  }

  /**
   * Appends the logs of one tx (wrapped as extended logs with their LogId) onto `results`,
   * applying the filter's contract address and afterLog offsets.
   * @returns True when the page size cap was reached while accumulating.
   */
  #accumulateLogs(results, blockNumber, txIndex, txLogs, filter) {
    let maxLogsHit = false;
    let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
    for (; logIndex < txLogs.length; logIndex++) {
      const log = txLogs[logIndex];
      if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
        if (log instanceof ContractClassLog) {
          results.push(new ExtendedContractClassLog(new LogId(blockNumber, txIndex, logIndex), log));
        } else {
          results.push(new ExtendedPublicLog(new LogId(blockNumber, txIndex, logIndex), log));
        }
        if (results.length >= this.#logsMaxPageSize) {
          maxLogsHit = true;
          break;
        }
      }
    }
    return maxLogsHit;
  }
}
@@ -0,0 +1,33 @@
1
+ import { Fr } from '@aztec/foundation/fields';
2
+ import type { AztecAsyncKVStore } from '@aztec/kv-store';
3
+ import { InboxLeaf } from '@aztec/stdlib/messaging';
4
+ import type { DataRetrieval } from '../structs/data_retrieval.js';
5
/**
 * LMDB implementation of the ArchiverDataStore interface.
 */
export declare class MessageStore {
    #private;
    private db;
    /** @param db - The async KV store backing the message maps and singletons. */
    constructor(db: AztecAsyncKVStore);
    /** Returns the total number of L1 to L2 messages stored (0n when none recorded). */
    getTotalL1ToL2MessageCount(): Promise<bigint>;
    /**
     * Gets the last L1 block number that emitted new messages.
     * @returns The last L1 block number processed
     */
    getSynchedL1BlockNumber(): Promise<bigint | undefined>;
    /** Records the given L1 block number as the latest one synched for messages. */
    setSynchedL1BlockNumber(l1BlockNumber: bigint): Promise<void>;
    /**
     * Append L1 to L2 messages to the store.
     * @param messages - The L1 to L2 messages to be added to the store and the last processed L1 block.
     * @returns True if the operation is successful.
     */
    addL1ToL2Messages(messages: DataRetrieval<InboxLeaf>): Promise<boolean>;
    /**
     * Gets the L1 to L2 message index in the L1 to L2 message tree.
     * @param l1ToL2Message - The L1 to L2 message.
     * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
     */
    getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise<bigint | undefined>;
    /**
     * Returns the stored L1 to L2 message leaves for the given L2 block number.
     * @throws If a message is found after a gap in the block's message indices.
     */
    getL1ToL2Messages(blockNumber: bigint): Promise<Fr[]>;
}
33
+ //# sourceMappingURL=message_store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"message_store.d.ts","sourceRoot":"","sources":["../../../src/archiver/kv_archiver_store/message_store.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAE9C,OAAO,KAAK,EAAE,iBAAiB,EAAsC,MAAM,iBAAiB,CAAC;AAC7F,OAAO,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AAEpD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,8BAA8B,CAAC;AAElE;;GAEG;AACH,qBAAa,YAAY;;IAUX,OAAO,CAAC,EAAE;gBAAF,EAAE,EAAE,iBAAiB;IAOnC,0BAA0B,IAAI,OAAO,CAAC,MAAM,CAAC;IAInD;;;OAGG;IACH,uBAAuB,IAAI,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAIhD,uBAAuB,CAAC,aAAa,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAInE;;;;OAIG;IACH,iBAAiB,CAAC,QAAQ,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC;IAsBvE;;;;OAIG;IACH,qBAAqB,CAAC,aAAa,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAI/D,iBAAiB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,EAAE,EAAE,CAAC;CAqB5D"}
@@ -0,0 +1,85 @@
1
+ import { L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/constants';
2
+ import { Fr } from '@aztec/foundation/fields';
3
+ import { createLogger } from '@aztec/foundation/log';
4
+ import { InboxLeaf } from '@aztec/stdlib/messaging';
5
/**
 * LMDB implementation of the ArchiverDataStore interface.
 */
export class MessageStore {
  db;
  #l1ToL2Messages;
  #l1ToL2MessageIndices;
  #lastSynchedL1Block;
  #totalMessageCount;
  #log;
  #l1ToL2MessagesSubtreeSize;

  /** @param db - The async KV store backing the message maps and singletons. */
  constructor(db) {
    this.db = db;
    this.#log = createLogger('archiver:message_store');
    this.#l1ToL2MessagesSubtreeSize = 2 ** L1_TO_L2_MSG_SUBTREE_HEIGHT;
    this.#l1ToL2Messages = db.openMap('archiver_l1_to_l2_messages');
    this.#l1ToL2MessageIndices = db.openMap('archiver_l1_to_l2_message_indices');
    this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_new_messages');
    this.#totalMessageCount = db.openSingleton('archiver_l1_to_l2_message_count');
  }

  /** Returns how many L1 to L2 messages have been stored so far (0n when none recorded). */
  async getTotalL1ToL2MessageCount() {
    const storedCount = await this.#totalMessageCount.getAsync();
    return storedCount ?? 0n;
  }

  /**
   * Gets the last L1 block number that emitted new messages.
   * @returns The last L1 block number processed
   */
  getSynchedL1BlockNumber() {
    return this.#lastSynchedL1Block.getAsync();
  }

  /** Records the given L1 block number as the latest one synched for messages. */
  async setSynchedL1BlockNumber(l1BlockNumber) {
    await this.#lastSynchedL1Block.set(l1BlockNumber);
  }

  /**
   * Append L1 to L2 messages to the store.
   * @param messages - The L1 to L2 messages to be added to the store and the last processed L1 block.
   * @returns True if the operation is successful.
   */
  addL1ToL2Messages(messages) {
    return this.db.transactionAsync(async () => {
      const synchedSoFar = (await this.#lastSynchedL1Block.getAsync()) ?? 0n;
      // Skip batches that do not advance past the last processed L1 block.
      if (synchedSoFar >= messages.lastProcessedL1BlockNumber) {
        return false;
      }
      await this.#lastSynchedL1Block.set(messages.lastProcessedL1BlockNumber);
      // Index each leaf both by tree index (for retrieval) and by hash (for index lookup).
      for (const { index, leaf } of messages.retrievedData) {
        await this.#l1ToL2Messages.set(`${index}`, leaf.toBuffer());
        await this.#l1ToL2MessageIndices.set(leaf.toString(), index);
      }
      const previousTotal = await this.getTotalL1ToL2MessageCount();
      await this.#totalMessageCount.set(previousTotal + BigInt(messages.retrievedData.length));
      return true;
    });
  }

  /**
   * Gets the L1 to L2 message index in the L1 to L2 message tree.
   * @param l1ToL2Message - The L1 to L2 message.
   * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
   */
  getL1ToL2MessageIndex(l1ToL2Message) {
    return this.#l1ToL2MessageIndices.getAsync(l1ToL2Message.toString());
  }

  /**
   * Returns the stored message leaves belonging to the given L2 block.
   * @throws If a message appears after a gap, which would indicate a log-fetching issue.
   */
  async getL1ToL2Messages(blockNumber) {
    const collected = [];
    const firstIndex = Number(InboxLeaf.smallestIndexFromL2Block(blockNumber));
    const endIndex = firstIndex + this.#l1ToL2MessagesSubtreeSize;
    let gapSeen = false;
    for (let index = firstIndex; index < endIndex; index++) {
      // One point lookup per index: inefficient but probably fine for now.
      const storedLeaf = await this.#l1ToL2Messages.getAsync(`${index}`);
      if (!storedLeaf) {
        gapSeen = true;
        // Keep scanning so a message appearing after the gap can be detected below.
        continue;
      }
      if (gapSeen) {
        throw new Error(`L1 to L2 message gap found in block ${blockNumber}`);
      }
      collected.push(Fr.fromBuffer(storedLeaf));
    }
    return collected;
  }
}
@@ -0,0 +1,12 @@
1
+ import type { Fr } from '@aztec/foundation/fields';
2
+ import type { AztecAsyncKVStore } from '@aztec/kv-store';
3
+ import type { InBlock, L2Block } from '@aztec/stdlib/block';
4
/** KV-backed index of nullifiers to the block (number, hash) and leaf index they were included at. */
export declare class NullifierStore {
    #private;
    private db;
    /** @param db - The async KV store backing the nullifier maps. */
    constructor(db: AztecAsyncKVStore);
    /**
     * Indexes every nullifier of the given blocks by block number, block hash and leaf index.
     * @param blocks - The blocks whose nullifiers should be recorded.
     * @returns True when the operation completes.
     */
    addNullifiers(blocks: L2Block[]): Promise<boolean>;
    /**
     * Removes the index entries for every nullifier of the given blocks.
     * @param blocks - The blocks whose nullifiers should be removed.
     * @returns True when the operation completes.
     */
    deleteNullifiers(blocks: L2Block[]): Promise<boolean>;
    /**
     * Looks up each nullifier's leaf index together with the block it was included in.
     * @param blockNumber - Entries included in a later block are treated as not found.
     * @param nullifiers - The nullifiers to look up; result order matches input order.
     * @returns For each nullifier, its in-block index data, or undefined if unknown.
     */
    findNullifiersIndexesWithBlock(blockNumber: number, nullifiers: Fr[]): Promise<(InBlock<bigint> | undefined)[]>;
}
12
+ //# sourceMappingURL=nullifier_store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"nullifier_store.d.ts","sourceRoot":"","sources":["../../../src/archiver/kv_archiver_store/nullifier_store.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAEnD,OAAO,KAAK,EAAE,iBAAiB,EAAiB,MAAM,iBAAiB,CAAC;AACxE,OAAO,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAE5D,qBAAa,cAAc;;IAMb,OAAO,CAAC,EAAE;gBAAF,EAAE,EAAE,iBAAiB;IAMnC,aAAa,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;IA0BlD,gBAAgB,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;IAerD,8BAA8B,CAClC,WAAW,EAAE,MAAM,EACnB,UAAU,EAAE,EAAE,EAAE,GACf,OAAO,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,EAAE,CAAC;CAkC5C"}
@@ -0,0 +1,73 @@
1
+ import { MAX_NULLIFIERS_PER_TX } from '@aztec/constants';
2
+ import { createLogger } from '@aztec/foundation/log';
3
/** KV-backed index of nullifiers to the block (number, hash) and leaf index they were included at. */
export class NullifierStore {
  db;
  #nullifiersToBlockNumber;
  #nullifiersToBlockHash;
  #nullifiersToIndex;
  #log;

  /** @param db - The async KV store backing the nullifier maps. */
  constructor(db) {
    this.db = db;
    // Fix: logger namespace was 'archiver:log_store', an apparent copy-paste from
    // LogStore; use this store's own namespace so its output is attributable.
    this.#log = createLogger('archiver:nullifier_store');
    this.#nullifiersToBlockNumber = db.openMap('archiver_nullifiers_to_block_number');
    this.#nullifiersToBlockHash = db.openMap('archiver_nullifiers_to_block_hash');
    this.#nullifiersToIndex = db.openMap('archiver_nullifiers_to_index');
  }

  /**
   * Indexes every nullifier of the given blocks by block number, block hash and leaf index.
   * @param blocks - The blocks whose nullifiers should be recorded.
   * @returns True when the operation completes.
   */
  async addNullifiers(blocks) {
    const blockHashes = await Promise.all(blocks.map(block => block.hash()));
    await this.db.transactionAsync(async () => {
      await Promise.all(
        blocks.map((block, i) => {
          // The nullifier tree's next available leaf index minus the block's nullifier
          // capacity gives the first leaf index of this block.
          const dataStartIndexForBlock =
            block.header.state.partial.nullifierTree.nextAvailableLeafIndex -
            block.body.txEffects.length * MAX_NULLIFIERS_PER_TX;
          return Promise.all(
            block.body.txEffects.map((txEffect, txIndex) => {
              const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NULLIFIERS_PER_TX;
              return Promise.all(
                txEffect.nullifiers.map(async (nullifier, nullifierIndex) => {
                  await this.#nullifiersToBlockNumber.set(nullifier.toString(), block.number);
                  await this.#nullifiersToBlockHash.set(nullifier.toString(), blockHashes[i].toString());
                  await this.#nullifiersToIndex.set(nullifier.toString(), dataStartIndexForTx + nullifierIndex);
                }),
              );
            }),
          );
        }),
      );
    });
    return true;
  }

  /**
   * Removes the index entries for every nullifier of the given blocks.
   * @param blocks - The blocks whose nullifiers should be removed.
   * @returns True when the operation completes.
   */
  async deleteNullifiers(blocks) {
    await this.db.transactionAsync(async () => {
      for (const block of blocks) {
        for (const nullifier of block.body.txEffects.flatMap(tx => tx.nullifiers)) {
          await Promise.all([
            this.#nullifiersToBlockNumber.delete(nullifier.toString()),
            this.#nullifiersToBlockHash.delete(nullifier.toString()),
            this.#nullifiersToIndex.delete(nullifier.toString()),
          ]);
        }
      }
    });
    return true;
  }

  /**
   * Looks up each nullifier's leaf index together with the block it was included in.
   * @param blockNumber - Entries included in a later block are treated as not found.
   * @param nullifiers - The nullifiers to look up; result order matches input order.
   * @returns For each nullifier, `{ data, l2BlockNumber, l2BlockHash }` or undefined if unknown.
   */
  async findNullifiersIndexesWithBlock(blockNumber, nullifiers) {
    const asStrings = nullifiers.map(x => x.toString());
    const maybeNullifiers = await Promise.all(
      asStrings.map(async nullifier => {
        const [data, l2BlockNumber, l2BlockHash] = await Promise.all([
          this.#nullifiersToIndex.getAsync(nullifier),
          this.#nullifiersToBlockNumber.getAsync(nullifier),
          this.#nullifiersToBlockHash.getAsync(nullifier),
        ]);
        return { data, l2BlockNumber, l2BlockHash };
      }),
    );
    return maybeNullifiers.map(({ data, l2BlockNumber, l2BlockHash }) => {
      // A nullifier only counts as found if all three records exist and it was
      // included at or before the requested block number.
      if (data === undefined || l2BlockNumber === undefined || l2BlockHash === undefined || l2BlockNumber > blockNumber) {
        return undefined;
      } else {
        return {
          data: BigInt(data),
          l2BlockNumber,
          l2BlockHash,
        };
      }
    });
  }
}
@@ -0,0 +1,23 @@
1
+ import type { Fr } from '@aztec/foundation/fields';
2
+ import { InboxLeaf } from '@aztec/stdlib/messaging';
3
/**
 * A simple in-memory implementation of an L1 to L2 message store.
 */
export declare class L1ToL2MessageStore {
    #private;
    /**
     * A map pointing from a key in a "messageIndex" format to the corresponding L1 to L2 message hash.
     */
    protected store: Map<string, Fr>;
    constructor();
    /** Returns the number of messages currently held in the store. */
    getTotalL1ToL2MessageCount(): bigint;
    /** Inserts a single message into the store, keyed by its tree index. */
    addMessage(message: InboxLeaf): void;
    /**
     * Returns the stored message hashes belonging to the given L2 block.
     * @throws If a message is found after a gap in the block's message indices.
     */
    getMessages(blockNumber: bigint): Fr[];
    /**
     * Gets the L1 to L2 message index in the L1 to L2 message tree.
     * @param l1ToL2Message - The L1 to L2 message.
     * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
     */
    getMessageIndex(l1ToL2Message: Fr): bigint | undefined;
}
23
+ //# sourceMappingURL=l1_to_l2_message_store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"l1_to_l2_message_store.d.ts","sourceRoot":"","sources":["../../../src/archiver/memory_archiver_store/l1_to_l2_message_store.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AACnD,OAAO,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AAEpD;;GAEG;AACH,qBAAa,kBAAkB;;IAC7B;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,CAAa;;IAM7C,0BAA0B,IAAI,MAAM;IAIpC,UAAU,CAAC,OAAO,EAAE,SAAS;IAI7B,WAAW,CAAC,WAAW,EAAE,MAAM,GAAG,EAAE,EAAE;IAsBtC;;;;OAIG;IACH,eAAe,CAAC,aAAa,EAAE,EAAE,GAAG,MAAM,GAAG,SAAS;CAQvD"}
@@ -0,0 +1,49 @@
1
+ import { L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/constants';
2
+ import { InboxLeaf } from '@aztec/stdlib/messaging';
3
/**
 * A simple in-memory implementation of an L1 to L2 message store.
 */
export class L1ToL2MessageStore {
  /**
   * A map pointing from a key in a "messageIndex" format to the corresponding L1 to L2 message hash.
   */
  store = new Map();
  #l1ToL2MessagesSubtreeSize = 2 ** L1_TO_L2_MSG_SUBTREE_HEIGHT;

  constructor() {}

  /** Returns the number of messages currently held in the store. */
  getTotalL1ToL2MessageCount() {
    return BigInt(this.store.size);
  }

  /** Inserts a single message into the store, keyed by its tree index. */
  addMessage(message) {
    const key = `${message.index}`;
    this.store.set(key, message.leaf);
  }

  /**
   * Returns the stored message hashes belonging to the given L2 block.
   * @throws If a message appears after a gap, which would indicate a log-fetching issue.
   */
  getMessages(blockNumber) {
    const collected = [];
    const firstIndex = Number(InboxLeaf.smallestIndexFromL2Block(blockNumber));
    const endIndex = firstIndex + this.#l1ToL2MessagesSubtreeSize;
    let gapSeen = false;
    for (let index = firstIndex; index < endIndex; index++) {
      // One point lookup per index: inefficient but probably fine for now.
      const leaf = this.store.get(`${index}`);
      if (!leaf) {
        gapSeen = true;
        // Keep scanning so a message appearing after the gap can be detected below.
        continue;
      }
      if (gapSeen) {
        throw new Error(`L1 to L2 message gap found in block ${blockNumber}`);
      }
      collected.push(leaf);
    }
    return collected;
  }

  /**
   * Gets the L1 to L2 message index in the L1 to L2 message tree.
   * @param l1ToL2Message - The L1 to L2 message.
   * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
   */
  getMessageIndex(l1ToL2Message) {
    // Linear scan over all entries; the store is not indexed by hash.
    for (const [key, storedLeaf] of this.store.entries()) {
      if (storedLeaf.equals(l1ToL2Message)) {
        return BigInt(key);
      }
    }
    return undefined;
  }
}