@aztec/archiver 0.0.1-commit.e2b2873ed → 0.0.1-commit.e304674f1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/README.md +12 -6
  2. package/dest/archiver.d.ts +11 -8
  3. package/dest/archiver.d.ts.map +1 -1
  4. package/dest/archiver.js +79 -114
  5. package/dest/config.d.ts +3 -3
  6. package/dest/config.d.ts.map +1 -1
  7. package/dest/config.js +2 -1
  8. package/dest/errors.d.ts +34 -10
  9. package/dest/errors.d.ts.map +1 -1
  10. package/dest/errors.js +45 -16
  11. package/dest/factory.d.ts +4 -5
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +29 -26
  14. package/dest/index.d.ts +2 -1
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +1 -0
  17. package/dest/l1/bin/retrieve-calldata.js +32 -28
  18. package/dest/l1/calldata_retriever.d.ts +73 -50
  19. package/dest/l1/calldata_retriever.d.ts.map +1 -1
  20. package/dest/l1/calldata_retriever.js +191 -259
  21. package/dest/l1/data_retrieval.d.ts +11 -11
  22. package/dest/l1/data_retrieval.d.ts.map +1 -1
  23. package/dest/l1/data_retrieval.js +35 -34
  24. package/dest/l1/spire_proposer.d.ts +5 -5
  25. package/dest/l1/spire_proposer.d.ts.map +1 -1
  26. package/dest/l1/spire_proposer.js +9 -17
  27. package/dest/modules/data_source_base.d.ts +14 -7
  28. package/dest/modules/data_source_base.d.ts.map +1 -1
  29. package/dest/modules/data_source_base.js +39 -77
  30. package/dest/modules/data_store_updater.d.ts +25 -12
  31. package/dest/modules/data_store_updater.d.ts.map +1 -1
  32. package/dest/modules/data_store_updater.js +125 -94
  33. package/dest/modules/instrumentation.d.ts +15 -2
  34. package/dest/modules/instrumentation.d.ts.map +1 -1
  35. package/dest/modules/instrumentation.js +19 -2
  36. package/dest/modules/l1_synchronizer.d.ts +7 -9
  37. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  38. package/dest/modules/l1_synchronizer.js +176 -136
  39. package/dest/modules/validation.d.ts +1 -1
  40. package/dest/modules/validation.d.ts.map +1 -1
  41. package/dest/modules/validation.js +2 -2
  42. package/dest/store/block_store.d.ts +66 -28
  43. package/dest/store/block_store.d.ts.map +1 -1
  44. package/dest/store/block_store.js +356 -135
  45. package/dest/store/contract_class_store.d.ts +2 -3
  46. package/dest/store/contract_class_store.d.ts.map +1 -1
  47. package/dest/store/contract_class_store.js +7 -67
  48. package/dest/store/contract_instance_store.d.ts +1 -1
  49. package/dest/store/contract_instance_store.d.ts.map +1 -1
  50. package/dest/store/contract_instance_store.js +6 -2
  51. package/dest/store/kv_archiver_store.d.ts +61 -24
  52. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  53. package/dest/store/kv_archiver_store.js +75 -27
  54. package/dest/store/l2_tips_cache.d.ts +20 -0
  55. package/dest/store/l2_tips_cache.d.ts.map +1 -0
  56. package/dest/store/l2_tips_cache.js +109 -0
  57. package/dest/store/log_store.d.ts +6 -3
  58. package/dest/store/log_store.d.ts.map +1 -1
  59. package/dest/store/log_store.js +93 -16
  60. package/dest/store/message_store.d.ts +5 -1
  61. package/dest/store/message_store.d.ts.map +1 -1
  62. package/dest/store/message_store.js +21 -9
  63. package/dest/test/fake_l1_state.d.ts +21 -1
  64. package/dest/test/fake_l1_state.d.ts.map +1 -1
  65. package/dest/test/fake_l1_state.js +133 -26
  66. package/dest/test/mock_archiver.d.ts +1 -1
  67. package/dest/test/mock_archiver.d.ts.map +1 -1
  68. package/dest/test/mock_archiver.js +3 -2
  69. package/dest/test/mock_l1_to_l2_message_source.d.ts +1 -1
  70. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  71. package/dest/test/mock_l1_to_l2_message_source.js +2 -1
  72. package/dest/test/mock_l2_block_source.d.ts +26 -5
  73. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  74. package/dest/test/mock_l2_block_source.js +160 -89
  75. package/dest/test/mock_structs.d.ts +4 -1
  76. package/dest/test/mock_structs.d.ts.map +1 -1
  77. package/dest/test/mock_structs.js +13 -1
  78. package/dest/test/noop_l1_archiver.d.ts +4 -1
  79. package/dest/test/noop_l1_archiver.d.ts.map +1 -1
  80. package/dest/test/noop_l1_archiver.js +5 -2
  81. package/package.json +13 -13
  82. package/src/archiver.ts +101 -138
  83. package/src/config.ts +8 -1
  84. package/src/errors.ts +70 -26
  85. package/src/factory.ts +30 -16
  86. package/src/index.ts +1 -0
  87. package/src/l1/README.md +25 -68
  88. package/src/l1/bin/retrieve-calldata.ts +40 -27
  89. package/src/l1/calldata_retriever.ts +250 -379
  90. package/src/l1/data_retrieval.ts +31 -37
  91. package/src/l1/spire_proposer.ts +7 -15
  92. package/src/modules/data_source_base.ts +78 -98
  93. package/src/modules/data_store_updater.ts +138 -124
  94. package/src/modules/instrumentation.ts +29 -2
  95. package/src/modules/l1_synchronizer.ts +196 -168
  96. package/src/modules/validation.ts +2 -2
  97. package/src/store/block_store.ts +451 -172
  98. package/src/store/contract_class_store.ts +8 -106
  99. package/src/store/contract_instance_store.ts +8 -5
  100. package/src/store/kv_archiver_store.ts +115 -41
  101. package/src/store/l2_tips_cache.ts +128 -0
  102. package/src/store/log_store.ts +126 -27
  103. package/src/store/message_store.ts +27 -10
  104. package/src/structs/inbox_message.ts +1 -1
  105. package/src/test/fake_l1_state.ts +178 -30
  106. package/src/test/mock_archiver.ts +3 -2
  107. package/src/test/mock_l1_to_l2_message_source.ts +1 -0
  108. package/src/test/mock_l2_block_source.ts +209 -82
  109. package/src/test/mock_structs.ts +20 -6
  110. package/src/test/noop_l1_archiver.ts +7 -2
@@ -1,6 +1,6 @@
1
1
  import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
2
2
  import { BlockNumber } from '@aztec/foundation/branded-types';
3
- import { filterAsync } from '@aztec/foundation/collection';
3
+ import { compactArray, filterAsync } from '@aztec/foundation/collection';
4
4
  import { Fr } from '@aztec/foundation/curves/bn254';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
6
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
@@ -22,6 +22,7 @@ import {
22
22
  } from '@aztec/stdlib/logs';
23
23
  import { TxHash } from '@aztec/stdlib/tx';
24
24
 
25
+ import { OutOfOrderLogInsertionError } from '../errors.js';
25
26
  import type { BlockStore } from './block_store.js';
26
27
 
27
28
  /**
@@ -165,10 +166,21 @@ export class LogStore {
165
166
 
166
167
  for (const taggedLogBuffer of currentPrivateTaggedLogs) {
167
168
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
168
- privateTaggedLogs.set(
169
- taggedLogBuffer.tag,
170
- taggedLogBuffer.logBuffers!.concat(privateTaggedLogs.get(taggedLogBuffer.tag)!),
171
- );
169
+ const newLogs = privateTaggedLogs.get(taggedLogBuffer.tag)!;
170
+ if (newLogs.length === 0) {
171
+ continue;
172
+ }
173
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
174
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
175
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
176
+ throw new OutOfOrderLogInsertionError(
177
+ 'private',
178
+ taggedLogBuffer.tag,
179
+ lastExisting.blockNumber,
180
+ firstNew.blockNumber,
181
+ );
182
+ }
183
+ privateTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
172
184
  }
173
185
  }
174
186
 
@@ -200,10 +212,21 @@ export class LogStore {
200
212
 
201
213
  for (const taggedLogBuffer of currentPublicTaggedLogs) {
202
214
  if (taggedLogBuffer.logBuffers && taggedLogBuffer.logBuffers.length > 0) {
203
- publicTaggedLogs.set(
204
- taggedLogBuffer.tag,
205
- taggedLogBuffer.logBuffers!.concat(publicTaggedLogs.get(taggedLogBuffer.tag)!),
206
- );
215
+ const newLogs = publicTaggedLogs.get(taggedLogBuffer.tag)!;
216
+ if (newLogs.length === 0) {
217
+ continue;
218
+ }
219
+ const lastExisting = TxScopedL2Log.fromBuffer(taggedLogBuffer.logBuffers.at(-1)!);
220
+ const firstNew = TxScopedL2Log.fromBuffer(newLogs[0]);
221
+ if (lastExisting.blockNumber > firstNew.blockNumber) {
222
+ throw new OutOfOrderLogInsertionError(
223
+ 'public',
224
+ taggedLogBuffer.tag,
225
+ lastExisting.blockNumber,
226
+ firstNew.blockNumber,
227
+ );
228
+ }
229
+ publicTaggedLogs.set(taggedLogBuffer.tag, taggedLogBuffer.logBuffers.concat(newLogs));
207
230
  }
208
231
  }
209
232
 
@@ -290,18 +313,49 @@ export class LogStore {
290
313
 
291
314
  deleteLogs(blocks: L2Block[]): Promise<boolean> {
292
315
  return this.db.transactionAsync(async () => {
293
- await Promise.all(
294
- blocks.map(async block => {
295
- // Delete private logs
296
- const privateKeys = (await this.#privateLogKeysByBlock.getAsync(block.number)) ?? [];
297
- await Promise.all(privateKeys.map(tag => this.#privateLogsByTag.delete(tag)));
298
-
299
- // Delete public logs
300
- const publicKeys = (await this.#publicLogKeysByBlock.getAsync(block.number)) ?? [];
301
- await Promise.all(publicKeys.map(key => this.#publicLogsByContractAndTag.delete(key)));
302
- }),
316
+ const blockNumbers = new Set(blocks.map(block => block.number));
317
+ const firstBlockToDelete = Math.min(...blockNumbers);
318
+
319
+ // Collect all unique private tags across all blocks being deleted
320
+ const allPrivateTags = new Set(
321
+ compactArray(await Promise.all(blocks.map(block => this.#privateLogKeysByBlock.getAsync(block.number)))).flat(),
303
322
  );
304
323
 
324
+ // Trim private logs: for each tag, delete all instances including and after the first block being deleted.
325
+ // This hinges on the invariant that logs for a given tag are always inserted in order of block number, which is enforced in #addPrivateLogs.
326
+ for (const tag of allPrivateTags) {
327
+ const existing = await this.#privateLogsByTag.getAsync(tag);
328
+ if (existing === undefined || existing.length === 0) {
329
+ continue;
330
+ }
331
+ const lastIndexToKeep = existing.findLastIndex(
332
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
333
+ );
334
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
335
+ await (remaining.length > 0 ? this.#privateLogsByTag.set(tag, remaining) : this.#privateLogsByTag.delete(tag));
336
+ }
337
+
338
+ // Collect all unique public keys across all blocks being deleted
339
+ const allPublicKeys = new Set(
340
+ compactArray(await Promise.all(blocks.map(block => this.#publicLogKeysByBlock.getAsync(block.number)))).flat(),
341
+ );
342
+
343
+ // And do the same as we did with private logs
344
+ for (const key of allPublicKeys) {
345
+ const existing = await this.#publicLogsByContractAndTag.getAsync(key);
346
+ if (existing === undefined || existing.length === 0) {
347
+ continue;
348
+ }
349
+ const lastIndexToKeep = existing.findLastIndex(
350
+ buf => TxScopedL2Log.getBlockNumberFromBuffer(buf) < firstBlockToDelete,
351
+ );
352
+ const remaining = existing.slice(0, lastIndexToKeep + 1);
353
+ await (remaining.length > 0
354
+ ? this.#publicLogsByContractAndTag.set(key, remaining)
355
+ : this.#publicLogsByContractAndTag.delete(key));
356
+ }
357
+
358
+ // After trimming the tagged logs, we can delete the block-level keys that track which tags are in which blocks.
305
359
  await Promise.all(
306
360
  blocks.map(block =>
307
361
  Promise.all([
@@ -322,17 +376,30 @@ export class LogStore {
322
376
  * array implies no logs match that tag.
323
377
  * @param tags - The tags to search for.
324
378
  * @param page - The page number (0-indexed) for pagination.
379
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
325
380
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
326
381
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
327
382
  */
328
- async getPrivateLogsByTags(tags: SiloedTag[], page: number = 0): Promise<TxScopedL2Log[][]> {
383
+ async getPrivateLogsByTags(
384
+ tags: SiloedTag[],
385
+ page: number = 0,
386
+ upToBlockNumber?: BlockNumber,
387
+ ): Promise<TxScopedL2Log[][]> {
329
388
  const logs = await Promise.all(tags.map(tag => this.#privateLogsByTag.getAsync(tag.toString())));
389
+
330
390
  const start = page * MAX_LOGS_PER_TAG;
331
391
  const end = start + MAX_LOGS_PER_TAG;
332
392
 
333
- return logs.map(
334
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
335
- );
393
+ return logs.map(logBuffers => {
394
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
395
+ if (upToBlockNumber !== undefined) {
396
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
397
+ if (cutoff !== -1) {
398
+ return deserialized.slice(0, cutoff);
399
+ }
400
+ }
401
+ return deserialized;
402
+ });
336
403
  }
337
404
 
338
405
  /**
@@ -341,6 +408,7 @@ export class LogStore {
341
408
  * @param contractAddress - The contract address to search logs for.
342
409
  * @param tags - The tags to search for.
343
410
  * @param page - The page number (0-indexed) for pagination.
411
+ * @param upToBlockNumber - If set, only return logs from blocks up to and including this block number.
344
412
  * @returns An array of log arrays, one per tag. Returns at most MAX_LOGS_PER_TAG logs per tag per page. If
345
413
  * MAX_LOGS_PER_TAG logs are returned for a tag, the caller should fetch the next page to check for more logs.
346
414
  */
@@ -348,6 +416,7 @@ export class LogStore {
348
416
  contractAddress: AztecAddress,
349
417
  tags: Tag[],
350
418
  page: number = 0,
419
+ upToBlockNumber?: BlockNumber,
351
420
  ): Promise<TxScopedL2Log[][]> {
352
421
  const logs = await Promise.all(
353
422
  tags.map(tag => {
@@ -358,9 +427,16 @@ export class LogStore {
358
427
  const start = page * MAX_LOGS_PER_TAG;
359
428
  const end = start + MAX_LOGS_PER_TAG;
360
429
 
361
- return logs.map(
362
- logBuffers => logBuffers?.slice(start, end).map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? [],
363
- );
430
+ return logs.map(logBuffers => {
431
+ const deserialized = logBuffers?.slice(start, end).map(buf => TxScopedL2Log.fromBuffer(buf)) ?? [];
432
+ if (upToBlockNumber !== undefined) {
433
+ const cutoff = deserialized.findIndex(log => log.blockNumber > upToBlockNumber);
434
+ if (cutoff !== -1) {
435
+ return deserialized.slice(0, cutoff);
436
+ }
437
+ }
438
+ return deserialized;
439
+ });
364
440
  }
365
441
 
366
442
  /**
@@ -588,11 +664,24 @@ export class LogStore {
588
664
  txLogs: PublicLog[],
589
665
  filter: LogFilter = {},
590
666
  ): boolean {
667
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
668
+ return false;
669
+ }
670
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
671
+ return false;
672
+ }
673
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
674
+ return false;
675
+ }
676
+
591
677
  let maxLogsHit = false;
592
678
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
593
679
  for (; logIndex < txLogs.length; logIndex++) {
594
680
  const log = txLogs[logIndex];
595
- if (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) {
681
+ if (
682
+ (!filter.contractAddress || log.contractAddress.equals(filter.contractAddress)) &&
683
+ (!filter.tag || log.fields[0]?.equals(filter.tag))
684
+ ) {
596
685
  results.push(
597
686
  new ExtendedPublicLog(new LogId(BlockNumber(blockNumber), blockHash, txHash, txIndex, logIndex), log),
598
687
  );
@@ -616,6 +705,16 @@ export class LogStore {
616
705
  txLogs: ContractClassLog[],
617
706
  filter: LogFilter = {},
618
707
  ): boolean {
708
+ if (filter.fromBlock && blockNumber < filter.fromBlock) {
709
+ return false;
710
+ }
711
+ if (filter.toBlock && blockNumber >= filter.toBlock) {
712
+ return false;
713
+ }
714
+ if (filter.txHash && !txHash.equals(filter.txHash)) {
715
+ return false;
716
+ }
717
+
619
718
  let maxLogsHit = false;
620
719
  let logIndex = typeof filter.afterLog?.logIndex === 'number' ? filter.afterLog.logIndex + 1 : 0;
621
720
  for (; logIndex < txLogs.length; logIndex++) {
@@ -14,6 +14,7 @@ import {
14
14
  } from '@aztec/kv-store';
15
15
  import { InboxLeaf } from '@aztec/stdlib/messaging';
16
16
 
17
+ import { L1ToL2MessagesNotReadyError } from '../errors.js';
17
18
  import {
18
19
  type InboxMessage,
19
20
  deserializeInboxMessage,
@@ -40,6 +41,8 @@ export class MessageStore {
40
41
  #lastSynchedL1Block: AztecAsyncSingleton<Buffer>;
41
42
  /** Stores total messages stored */
42
43
  #totalMessageCount: AztecAsyncSingleton<bigint>;
44
+ /** Stores the checkpoint number whose message tree is currently being filled on L1. */
45
+ #inboxTreeInProgress: AztecAsyncSingleton<bigint>;
43
46
 
44
47
  #log = createLogger('archiver:message_store');
45
48
 
@@ -48,6 +51,7 @@ export class MessageStore {
48
51
  this.#l1ToL2MessageIndices = db.openMap('archiver_l1_to_l2_message_indices');
49
52
  this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_id');
50
53
  this.#totalMessageCount = db.openSingleton('archiver_l1_to_l2_message_count');
54
+ this.#inboxTreeInProgress = db.openSingleton('archiver_inbox_tree_in_progress');
51
55
  }
52
56
 
53
57
  public async getTotalL1ToL2MessageCount(): Promise<bigint> {
@@ -137,7 +141,7 @@ export class MessageStore {
137
141
  );
138
142
  }
139
143
 
140
- // Check the first message in a block has the correct index.
144
+ // Check the first message in a checkpoint has the correct index.
141
145
  if (
142
146
  (!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) &&
143
147
  message.index !== expectedStart
@@ -157,15 +161,6 @@ export class MessageStore {
157
161
  lastMessage = message;
158
162
  }
159
163
 
160
- // Update the L1 sync point to that of the last message added.
161
- const currentSyncPoint = await this.getSynchedL1Block();
162
- if (!currentSyncPoint || currentSyncPoint.l1BlockNumber < lastMessage!.l1BlockNumber) {
163
- await this.setSynchedL1Block({
164
- l1BlockNumber: lastMessage!.l1BlockNumber,
165
- l1BlockHash: lastMessage!.l1BlockHash,
166
- });
167
- }
168
-
169
164
  // Update total message count with the number of inserted messages.
170
165
  await this.increaseTotalMessageCount(messageCount);
171
166
  });
@@ -185,7 +180,29 @@ export class MessageStore {
185
180
  return msg ? deserializeInboxMessage(msg) : undefined;
186
181
  }
187
182
 
183
+ /** Returns the inbox tree-in-progress checkpoint number from L1, or undefined if not yet set. */
184
+ public getInboxTreeInProgress(): Promise<bigint | undefined> {
185
+ return this.#inboxTreeInProgress.getAsync();
186
+ }
187
+
188
+ /** Atomically updates the message sync state: the L1 sync point and the inbox tree-in-progress marker. */
189
+ public setMessageSyncState(l1Block: L1BlockId, treeInProgress: bigint | undefined): Promise<void> {
190
+ return this.db.transactionAsync(async () => {
191
+ await this.setSynchedL1Block(l1Block);
192
+ if (treeInProgress !== undefined) {
193
+ await this.#inboxTreeInProgress.set(treeInProgress);
194
+ } else {
195
+ await this.#inboxTreeInProgress.delete();
196
+ }
197
+ });
198
+ }
199
+
188
200
  public async getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise<Fr[]> {
201
+ const treeInProgress = await this.#inboxTreeInProgress.getAsync();
202
+ if (treeInProgress !== undefined && BigInt(checkpointNumber) >= treeInProgress) {
203
+ throw new L1ToL2MessagesNotReadyError(checkpointNumber, treeInProgress);
204
+ }
205
+
189
206
  const messages: Fr[] = [];
190
207
 
191
208
  const [startIndex, endIndex] = InboxLeaf.indexRangeForCheckpoint(checkpointNumber);
@@ -8,7 +8,7 @@ export type InboxMessage = {
8
8
  index: bigint;
9
9
  leaf: Fr;
10
10
  checkpointNumber: CheckpointNumber;
11
- l1BlockNumber: bigint; // L1 block number - NOT Aztec L2
11
+ l1BlockNumber: bigint;
12
12
  l1BlockHash: Buffer32;
13
13
  rollingHash: Buffer16;
14
14
  };