@aztec/archiver 0.0.1-commit.f146247c → 0.0.1-commit.f1b29a41e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/dest/archiver.d.ts +9 -6
  2. package/dest/archiver.d.ts.map +1 -1
  3. package/dest/archiver.js +76 -111
  4. package/dest/config.d.ts +3 -3
  5. package/dest/config.d.ts.map +1 -1
  6. package/dest/config.js +2 -1
  7. package/dest/errors.d.ts +34 -10
  8. package/dest/errors.d.ts.map +1 -1
  9. package/dest/errors.js +45 -16
  10. package/dest/factory.d.ts +4 -5
  11. package/dest/factory.d.ts.map +1 -1
  12. package/dest/factory.js +31 -26
  13. package/dest/index.d.ts +2 -1
  14. package/dest/index.d.ts.map +1 -1
  15. package/dest/index.js +1 -0
  16. package/dest/l1/bin/retrieve-calldata.js +36 -33
  17. package/dest/l1/calldata_retriever.d.ts +73 -50
  18. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  19. package/dest/l1/calldata_retriever.js +191 -259
  20. package/dest/l1/data_retrieval.d.ts +9 -9
  21. package/dest/l1/data_retrieval.d.ts.map +1 -1
  22. package/dest/l1/data_retrieval.js +21 -19
  23. package/dest/l1/spire_proposer.d.ts +5 -5
  24. package/dest/l1/spire_proposer.d.ts.map +1 -1
  25. package/dest/l1/spire_proposer.js +9 -17
  26. package/dest/modules/data_source_base.d.ts +14 -7
  27. package/dest/modules/data_source_base.d.ts.map +1 -1
  28. package/dest/modules/data_source_base.js +39 -77
  29. package/dest/modules/data_store_updater.d.ts +25 -12
  30. package/dest/modules/data_store_updater.d.ts.map +1 -1
  31. package/dest/modules/data_store_updater.js +125 -94
  32. package/dest/modules/instrumentation.d.ts +15 -2
  33. package/dest/modules/instrumentation.d.ts.map +1 -1
  34. package/dest/modules/instrumentation.js +19 -2
  35. package/dest/modules/l1_synchronizer.d.ts +5 -8
  36. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  37. package/dest/modules/l1_synchronizer.js +73 -33
  38. package/dest/modules/validation.d.ts +1 -1
  39. package/dest/modules/validation.d.ts.map +1 -1
  40. package/dest/modules/validation.js +2 -2
  41. package/dest/store/block_store.d.ts +65 -28
  42. package/dest/store/block_store.d.ts.map +1 -1
  43. package/dest/store/block_store.js +311 -134
  44. package/dest/store/contract_class_store.d.ts +2 -3
  45. package/dest/store/contract_class_store.d.ts.map +1 -1
  46. package/dest/store/contract_class_store.js +7 -67
  47. package/dest/store/contract_instance_store.d.ts +1 -1
  48. package/dest/store/contract_instance_store.d.ts.map +1 -1
  49. package/dest/store/contract_instance_store.js +6 -2
  50. package/dest/store/kv_archiver_store.d.ts +62 -21
  51. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  52. package/dest/store/kv_archiver_store.js +75 -22
  53. package/dest/store/l2_tips_cache.d.ts +20 -0
  54. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  55. package/dest/store/l2_tips_cache.js +109 -0
  56. package/dest/store/log_store.d.ts +6 -3
  57. package/dest/store/log_store.d.ts.map +1 -1
  58. package/dest/store/log_store.js +148 -51
  59. package/dest/store/message_store.d.ts +5 -1
  60. package/dest/store/message_store.d.ts.map +1 -1
  61. package/dest/store/message_store.js +14 -1
  62. package/dest/test/fake_l1_state.d.ts +13 -1
  63. package/dest/test/fake_l1_state.d.ts.map +1 -1
  64. package/dest/test/fake_l1_state.js +95 -23
  65. package/dest/test/mock_archiver.d.ts +1 -1
  66. package/dest/test/mock_archiver.d.ts.map +1 -1
  67. package/dest/test/mock_archiver.js +3 -2
  68. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  69. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  70. package/dest/test/mock_l1_to_l2_message_source.js +2 -1
  71. package/dest/test/mock_l2_block_source.d.ts +26 -5
  72. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  73. package/dest/test/mock_l2_block_source.js +160 -89
  74. package/dest/test/mock_structs.d.ts +4 -1
  75. package/dest/test/mock_structs.d.ts.map +1 -1
  76. package/dest/test/mock_structs.js +13 -1
  77. package/dest/test/noop_l1_archiver.d.ts +4 -1
  78. package/dest/test/noop_l1_archiver.d.ts.map +1 -1
  79. package/dest/test/noop_l1_archiver.js +5 -1
  80. package/package.json +13 -13
  81. package/src/archiver.ts +93 -132
  82. package/src/config.ts +8 -1
  83. package/src/errors.ts +70 -26
  84. package/src/factory.ts +46 -24
  85. package/src/index.ts +1 -0
  86. package/src/l1/README.md +25 -68
  87. package/src/l1/bin/retrieve-calldata.ts +46 -39
  88. package/src/l1/calldata_retriever.ts +250 -379
  89. package/src/l1/data_retrieval.ts +23 -25
  90. package/src/l1/spire_proposer.ts +7 -15
  91. package/src/modules/data_source_base.ts +78 -98
  92. package/src/modules/data_store_updater.ts +138 -124
  93. package/src/modules/instrumentation.ts +29 -2
  94. package/src/modules/l1_synchronizer.ts +86 -43
  95. package/src/modules/validation.ts +2 -2
  96. package/src/store/block_store.ts +393 -170
  97. package/src/store/contract_class_store.ts +8 -106
  98. package/src/store/contract_instance_store.ts +8 -5
  99. package/src/store/kv_archiver_store.ts +117 -36
  100. package/src/store/l2_tips_cache.ts +128 -0
  101. package/src/store/log_store.ts +219 -58
  102. package/src/store/message_store.ts +20 -1
  103. package/src/test/fake_l1_state.ts +125 -26
  104. package/src/test/mock_archiver.ts +3 -2
  105. package/src/test/mock_l1_to_l2_message_source.ts +1 -0
  106. package/src/test/mock_l2_block_source.ts +209 -82
  107. package/src/test/mock_structs.ts +20 -6
  108. package/src/test/noop_l1_archiver.ts +7 -1
@@ -1,6 +1,6 @@
1
1
  import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
2
2
  import { BlockNumber } from '@aztec/foundation/branded-types';
3
- import { filterAsync } from '@aztec/foundation/collection';
3
+ import { compactArray, filterAsync } from '@aztec/foundation/collection';
4
4
  import { Fr } from '@aztec/foundation/curves/bn254';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
6
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
@@ -20,7 +20,9 @@ import {
20
20
  Tag,
21
21
  TxScopedL2Log,
22
22
  } from '@aztec/stdlib/logs';
23
+ import { TxHash } from '@aztec/stdlib/tx';
23
24
 
25
+ import { OutOfOrderLogInsertionError } from '../errors.js';
24
26
  import type { BlockStore } from './block_store.js';
25
27
 
26
28
  /**
@@ -164,10 +166,21 @@ export class LogStore {
164
166
 
165
167
  for (const taggedLogBuffer of currentPrivateTaggedLogs) {
166
168
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
167
- privateTaggedLogs.set(
168
- taggedLogBuffer.tag,
169
- taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
170
- );
169
+ const newLogs = privateTaggedLogs.get(taggedLogBuffer.tag)!;
170
+ if (newLogs.length === 0) {
171
+ continue;
172
+ }
173
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
174
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
175
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
176
+ throw new OutOfOrderLogInsertionError(
177
+ 'private',
178
+ taggedLogBuffer.tag,
179
+ lastExisting.blockNumber,
180
+ firstNew.blockNumber,
181
+ );
182
+ }
183
+ privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
171
184
  }
172
185
  }
173
186
 
@@ -199,10 +212,21 @@ export class LogStore {
199
212
 
200
213
  for (const taggedLogBuffer of currentPublicTaggedLogs) {
201
214
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
202
- publicTaggedLogs.set(
203
- taggedLogBuffer.tag,
204
- taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
205
- );
215
+ const newLogs = publicTaggedLogs.get(taggedLogBuffer.tag)!;
216
+ if (newLogs.length === 0) {
217
+ continue;
218
+ }
219
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
220
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
221
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
222
+ throw new OutOfOrderLogInsertionError(
223
+ 'public',
224
+ taggedLogBuffer.tag,
225
+ lastExisting.blockNumber,
226
+ firstNew.blockNumber,
227
+ );
228
+ }
229
+ publicTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
206
230
  }
207
231
  }
208
232
 
@@ -219,6 +243,7 @@ export class LogStore {
219
243
  .map((txEffect, txIndex) =>
220
244
  [
221
245
  numToUInt32BE(txIndex),
246
+ txEffect.txHash.toBuffer(),
222
247
  numToUInt32BE(txEffect.publicLogs.length),
223
248
  txEffect.publicLogs.map(log => log.toBuffer()),
224
249
  ].flat(),
@@ -242,6 +267,7 @@ export class LogStore {
242
267
  .map((txEffect, txIndex) =>
243
268
  [
244
269
  numToUInt32BE(txIndex),
270
+ txEffect.txHash.toBuffer(),
245
271
  numToUInt32BE(txEffect.contractClassLogs.length),
246
272
  txEffect.contractClassLogs.map(log => log.toBuffer()),
247
273
  ].flat(),
@@ -287,18 +313,49 @@ export class LogStore {
287
313
 
288
314
  deleteLogs(blocks: L2Block[]): Promise<boolean> {
289
315
  return this.db.transactionAsync(async () => {
290
- await Promise.all(
291
- blocks.map(async block => {
292
- // Delete private logs
293
- const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
294
- await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));
295
-
296
- // Delete public logs
297
- const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
298
- await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
299
- }),
316
+ const blockNumbers = new Set(blocks.map(block => block.number));
317
+ const firstBlockToDelete = Math.min(...blockNumbers);
318
+
319
+ // Collect all unique private tags across all blocks being deleted
320
+ const allPrivateTags = new Set(
321
+ compactArray(await Promise.all(blocks.map(block => this.#privateLogKeysByBlock.getAsync(block.number)))).flat(),
322
+ );
323
+
324
+ // Trim private logs: for each tag, delete all instances including and after the first block being deleted.
325
+ // This hinges on the invariant that logs for a given tag are always inserted in order of block number, which is enforced in #addPrivateLogs.
326
+ for (const tag of allPrivateTags) {
327
+ const existing = await this.#privateLogsByTag.getAsync(tag);
328
+ if (existing === undefined || existing.length === 0) {
329
+ continue;
330
+ }
331
+ const lastIndexToKeep = existing.findLastIndex(
332
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
333
+ );
334
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
335
+ await (remaining.length > 0 ? this.#privateLogsByTag.set(tag, remaining) : this.#privateLogsByTag.delete(tag));
336
+ }
337
+
338
+ // Collect all unique public keys across all blocks being deleted
339
+ const allPublicKeys = new Set(
340
+ compactArray(await Promise.all(blocks.map(block => this.#publicLogKeysByBlock.getAsync(block.number)))).flat(),
300
341
  );
301
342
 
343
+ // And do the same as we did with private logs
344
+ for (const key of allPublicKeys) {
345
+ const existing = await this.#publicLogsByContractAndTag.getAsync(key);
346
+ if (existing === undefined || existing.length === 0) {
347
+ continue;
348
+ }
349
+ const lastIndexToKeep = existing.findLastIndex(
350
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
351
+ );
352
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
353
+ await (remaining.length > 0
354
+ ? this.#publicLogsByContractAndTag.set(key, remaining)
355
+ : this.#publicLogsByContractAndTag.delete(key));
356
+ }
357
+
358
+ // After trimming the tagged logs, we can delete the block-level keys that track which tags are in which blocks.
302
359
  await Promise.all(
303
360
  blocks.map(block =>
304
361
  Promise.all([
@@ -319,17 +376,30 @@ export class LogStore {
319
376
  * array implies no logs match that tag.
320
377
  * @param tags - The tags to search for.
321
378
  * @param page - The page number (0-indexed) for pagination.
379
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
322
380
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
323
381
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
324
382
  */
325
- async getPrivateLogsByTags(tags: SiloedTag[], page: number = 0): Promise<TxScopedL2Log[][]> {
383
+ async getPrivateLogsByTags(
384
+ tags: SiloedTag[],
385
+ page: number = 0,
386
+ upToBlockNumber?: BlockNumber,
387
+ ): Promise<TxScopedL2Log[][]> {
326
388
  const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
389
+
327
390
  const start = page * MAX_LOGS_PER_TAG;
328
391
  const end = start + MAX_LOGS_PER_TAG;
329
392
 
330
- return logs.map(
331
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
332
- );
393
+ return logs.map(logBuffers => {
394
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
395
+ if (upToBlockNumber !== undefined) {
396
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
397
+ if (cutoff !== -1) {
398
+ return deserialized.slice(0, cutoff);
399
+ }
400
+ }
401
+ return deserialized;
402
+ });
333
403
  }
334
404
 
335
405
  /**
@@ -338,6 +408,7 @@ export class LogStore {
338
408
  * @param contractAddress - The contract address to search logs for.
339
409
  * @param tags - The tags to search for.
340
410
  * @param page - The page number (0-indexed) for pagination.
411
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
341
412
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
342
413
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
343
414
  */
@@ -345,6 +416,7 @@ export class LogStore {
345
416
  contractAddress: AztecAddress,
346
417
  tags: Tag[],
347
418
  page: number = 0,
419
+ upToBlockNumber?: BlockNumber,
348
420
  ): Promise<TxScopedL2Log[][]> {
349
421
  const logs = await Promise.all(
350
422
  tags.map(tag => {
@@ -355,9 +427,16 @@ export class LogStore {
355
427
  const start = page * MAX_LOGS_PER_TAG;
356
428
  const end = start + MAX_LOGS_PER_TAG;
357
429
 
358
- return logs.map(
359
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
360
- );
430
+ return logs.map(logBuffers => {
431
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
432
+ if (upToBlockNumber !== undefined) {
433
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
434
+ if (cutoff !== -1) {
435
+ return deserialized.slice(0, cutoff);
436
+ }
437
+ }
438
+ return deserialized;
439
+ });
361
440
  }
362
441
 
363
442
  /**
@@ -386,24 +465,33 @@ export class LogStore {
386
465
  }
387
466
 
388
467
  const buffer = (await this.#publicLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
389
- const publicLogsInBlock: [PublicLog[]] = [[]];
468
+ const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
390
469
  const reader = new BufferReader(buffer);
391
470
 
392
471
  const blockHash = this.#unpackBlockHash(reader);
393
472
 
394
473
  while (reader.remainingBytes() > 0) {
395
474
  const indexOfTx = reader.readNumber();
475
+ const txHash = reader.readObject(TxHash);
396
476
  const numLogsInTx = reader.readNumber();
397
- publicLogsInBlock[indexOfTx] = [];
477
+ publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
398
478
  for (let i = 0; i < numLogsInTx; i++) {
399
- publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
479
+ publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
400
480
  }
401
481
  }
402
482
 
403
- const txLogs = publicLogsInBlock[txIndex];
483
+ const txData = publicLogsInBlock[txIndex];
404
484
 
405
485
  const logs: ExtendedPublicLog[] = [];
406
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
486
+ const maxLogsHit = this.#accumulatePublicLogs(
487
+ logs,
488
+ blockNumber,
489
+ blockHash,
490
+ txIndex,
491
+ txData.txHash,
492
+ txData.logs,
493
+ filter,
494
+ );
407
495
 
408
496
  return { logs, maxLogsHit };
409
497
  }
@@ -424,22 +512,31 @@ export class LogStore {
424
512
 
425
513
  let maxLogsHit = false;
426
514
  loopOverBlocks: for await (const [blockNumber, logBuffer] of this.#publicLogsByBlock.entriesAsync({ start, end })) {
427
- const publicLogsInBlock: [PublicLog[]] = [[]];
515
+ const publicLogsInBlock: { txHash: TxHash; logs: PublicLog[] }[] = [];
428
516
  const reader = new BufferReader(logBuffer);
429
517
 
430
518
  const blockHash = this.#unpackBlockHash(reader);
431
519
 
432
520
  while (reader.remainingBytes() > 0) {
433
521
  const indexOfTx = reader.readNumber();
522
+ const txHash = reader.readObject(TxHash);
434
523
  const numLogsInTx = reader.readNumber();
435
- publicLogsInBlock[indexOfTx] = [];
524
+ publicLogsInBlock[indexOfTx] = { txHash, logs: [] };
436
525
  for (let i = 0; i < numLogsInTx; i++) {
437
- publicLogsInBlock[indexOfTx].push(reader.readObject(PublicLog));
526
+ publicLogsInBlock[indexOfTx].logs.push(reader.readObject(PublicLog));
438
527
  }
439
528
  }
440
529
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < publicLogsInBlock.length; txIndex++) {
441
- const txLogs = publicLogsInBlock[txIndex];
442
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
530
+ const txData = publicLogsInBlock[txIndex];
531
+ maxLogsHit = this.#accumulatePublicLogs(
532
+ logs,
533
+ blockNumber,
534
+ blockHash,
535
+ txIndex,
536
+ txData.txHash,
537
+ txData.logs,
538
+ filter,
539
+ );
443
540
  if (maxLogsHit) {
444
541
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
445
542
  break loopOverBlocks;
@@ -475,24 +572,33 @@ export class LogStore {
475
572
  return { logs: [], maxLogsHit: false };
476
573
  }
477
574
  const contractClassLogsBuffer = (await this.#contractClassLogsByBlock.getAsync(blockNumber)) ?? Buffer.alloc(0);
478
- const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
575
+ const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
479
576
 
480
577
  const reader = new BufferReader(contractClassLogsBuffer);
481
578
  const blockHash = this.#unpackBlockHash(reader);
482
579
 
483
580
  while (reader.remainingBytes() > 0) {
484
581
  const indexOfTx = reader.readNumber();
582
+ const txHash = reader.readObject(TxHash);
485
583
  const numLogsInTx = reader.readNumber();
486
- contractClassLogsInBlock[indexOfTx] = [];
584
+ contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
487
585
  for (let i = 0; i < numLogsInTx; i++) {
488
- contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
586
+ contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
489
587
  }
490
588
  }
491
589
 
492
- const txLogs = contractClassLogsInBlock[txIndex];
590
+ const txData = contractClassLogsInBlock[txIndex];
493
591
 
494
592
  const logs: ExtendedContractClassLog[] = [];
495
- const maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
593
+ const maxLogsHit = this.#accumulateContractClassLogs(
594
+ logs,
595
+ blockNumber,
596
+ blockHash,
597
+ txIndex,
598
+ txData.txHash,
599
+ txData.logs,
600
+ filter,
601
+ );
496
602
 
497
603
  return { logs, maxLogsHit };
498
604
  }
@@ -516,20 +622,29 @@ export class LogStore {
516
622
  start,
517
623
  end,
518
624
  })) {
519
- const contractClassLogsInBlock: [ContractClassLog[]] = [[]];
625
+ const contractClassLogsInBlock: { txHash: TxHash; logs: ContractClassLog[] }[] = [];
520
626
  const reader = new BufferReader(logBuffer);
521
627
  const blockHash = this.#unpackBlockHash(reader);
522
628
  while (reader.remainingBytes() > 0) {
523
629
  const indexOfTx = reader.readNumber();
630
+ const txHash = reader.readObject(TxHash);
524
631
  const numLogsInTx = reader.readNumber();
525
- contractClassLogsInBlock[indexOfTx] = [];
632
+ contractClassLogsInBlock[indexOfTx] = { txHash, logs: [] };
526
633
  for (let i = 0; i < numLogsInTx; i++) {
527
- contractClassLogsInBlock[indexOfTx].push(reader.readObject(ContractClassLog));
634
+ contractClassLogsInBlock[indexOfTx].logs.push(reader.readObject(ContractClassLog));
528
635
  }
529
636
  }
530
637
  for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < contractClassLogsInBlock.length; txIndex++) {
531
- const txLogs = contractClassLogsInBlock[txIndex];
532
- maxLogsHit = this.#accumulateLogs(logs, blockNumber, blockHash, txIndex, txLogs, filter);
638
+ const txData = contractClassLogsInBlock[txIndex];
639
+ maxLogsHit = this.#accumulateContractClassLogs(
640
+ logs,
641
+ blockNumber,
642
+ blockHash,
643
+ txIndex,
644
+ txData.txHash,
645
+ txData.logs,
646
+ filter,
647
+ );
533
648
  if (maxLogsHit) {
534
649
  this.#log.debug(`Max logs hit at block ${blockNumber}`);
535
650
  break loopOverBlocks;
@@ -540,28 +655,74 @@ export class LogStore {
540
655
  return { logs, maxLogsHit };
541
656
  }
542
657
 
543
- #accumulateLogs(
544
- results: (ExtendedContractClassLog | ExtendedPublicLog)[],
658
+ #accumulatePublicLogs(
659
+ results: ExtendedPublicLog[],
545
660
  blockNumber: number,
546
661
  blockHash: BlockHash,
547
662
  txIndex: number,
548
- txLogs: (ContractClassLog | PublicLog)[],
663
+ txHash: TxHash,
664
+ txLogs: PublicLog[],
549
665
  filter: LogFilter = {},
550
666
  ): boolean {
667
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
668
+ return false;
669
+ }
670
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
671
+ return false;
672
+ }
673
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
674
+ return false;
675
+ }
676
+
551
677
  let maxLogsHit = false;
552
678
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
553
679
  for (; logIndex < txLogs.length; logIndex++) {
554
680
  const log = txLogs[logIndex];
555
- if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
556
- if (log instanceof ContractClassLog) {
557
- results.push(
558
- new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log),
559
- );
560
- } else if (log instanceof PublicLog) {
561
- results.push(new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txIndex, logIndex), log));
562
- } else {
563
- throw new Error('Unknown log type');
681
+ if (
682
+ (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) &&
683
+ (!filter.tag || log.fields[0]?.equals(filter.tag))
684
+ ) {
685
+ results.push(
686
+ new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
687
+ );
688
+
689
+ if (results.length >= this.#logsMaxPageSize) {
690
+ maxLogsHit = true;
691
+ break;
564
692
  }
693
+ }
694
+ }
695
+
696
+ return maxLogsHit;
697
+ }
698
+
699
+ #accumulateContractClassLogs(
700
+ results: ExtendedContractClassLog[],
701
+ blockNumber: number,
702
+ blockHash: BlockHash,
703
+ txIndex: number,
704
+ txHash: TxHash,
705
+ txLogs: ContractClassLog[],
706
+ filter: LogFilter = {},
707
+ ): boolean {
708
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
709
+ return false;
710
+ }
711
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
712
+ return false;
713
+ }
714
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
715
+ return false;
716
+ }
717
+
718
+ let maxLogsHit = false;
719
+ let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
720
+ for (; logIndex < txLogs.length; logIndex++) {
721
+ const log = txLogs[logIndex];
722
+ if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
723
+ results.push(
724
+ new ExtendedContractClassLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
725
+ );
565
726
 
566
727
  if (results.length >= this.#logsMaxPageSize) {
567
728
  maxLogsHit = true;
@@ -14,6 +14,7 @@ import {
14
14
  } from '@aztec/kv-store';
15
15
  import { InboxLeaf } from '@aztec/stdlib/messaging';
16
16
 
17
+ import { L1ToL2MessagesNotReadyError } from '../errors.js';
17
18
  import {
18
19
  type InboxMessage,
19
20
  deserializeInboxMessage,
@@ -40,6 +41,8 @@ export class MessageStore {
40
41
  #lastSynchedL1Block: AztecAsyncSingleton<Buffer>;
41
42
  /** Stores total messages stored */
42
43
  #totalMessageCount: AztecAsyncSingleton<bigint>;
44
+ /** Stores the checkpoint number whose message tree is currently being filled on L1. */
45
+ #inboxTreeInProgress: AztecAsyncSingleton<bigint>;
43
46
 
44
47
  #log = createLogger('archiver:message_store');
45
48
 
@@ -48,6 +51,7 @@ export class MessageStore {
48
51
  this.#l1ToL2MessageIndices = db.openMap('archiver_l1_to_l2_message_indices');
49
52
  this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_id');
50
53
  this.#totalMessageCount = db.openSingleton('archiver_l1_to_l2_message_count');
54
+ this.#inboxTreeInProgress = db.openSingleton('archiver_inbox_tree_in_progress');
51
55
  }
52
56
 
53
57
  public async getTotalL1ToL2MessageCount(): Promise<bigint> {
@@ -137,7 +141,7 @@ export class MessageStore {
137
141
  );
138
142
  }
139
143
 
140
- // Check the first message in a block has the correct index.
144
+ // Check the first message in a checkpoint has the correct index.
141
145
  if (
142
146
  (!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) &&
143
147
  message.index !== expectedStart
@@ -185,7 +189,22 @@ export class MessageStore {
185
189
  return msg ? deserializeInboxMessage(msg) : undefined;
186
190
  }
187
191
 
192
+ /** Returns the inbox tree-in-progress checkpoint number from L1, or undefined if not yet set. */
193
+ public getInboxTreeInProgress(): Promise<bigint | undefined> {
194
+ return this.#inboxTreeInProgress.getAsync();
195
+ }
196
+
197
+ /** Persists the inbox tree-in-progress checkpoint number from L1 state. */
198
+ public async setInboxTreeInProgress(value: bigint): Promise<void> {
199
+ await this.#inboxTreeInProgress.set(value);
200
+ }
201
+
188
202
  public async getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
203
+ const treeInProgress = await this.#inboxTreeInProgress.getAsync();
204
+ if (treeInProgress !== undefined && BigInt(checkpointNumber) >= treeInProgress) {
205
+ throw new L1ToL2MessagesNotReadyError(checkpointNumber, treeInProgress);
206
+ }
207
+
189
208
  const messages: Fr[] = [];
190
209
 
191
210
  const [startIndex, endIndex] = InboxLeaf.indexRangeForCheckpoint(checkpointNumber);