@aztec/archiver 0.0.1-commit.e61ad554 → 0.0.1-commit.f146247c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dest/archiver.d.ts +3 -2
  2. package/dest/archiver.d.ts.map +1 -1
  3. package/dest/archiver.js +15 -2
  4. package/dest/factory.d.ts +3 -1
  5. package/dest/factory.d.ts.map +1 -1
  6. package/dest/factory.js +2 -3
  7. package/dest/l1/data_retrieval.js +1 -1
  8. package/dest/l1/validate_trace.d.ts +6 -3
  9. package/dest/l1/validate_trace.d.ts.map +1 -1
  10. package/dest/l1/validate_trace.js +13 -9
  11. package/dest/modules/data_source_base.d.ts +5 -5
  12. package/dest/modules/data_source_base.d.ts.map +1 -1
  13. package/dest/modules/instrumentation.d.ts +1 -1
  14. package/dest/modules/instrumentation.d.ts.map +1 -1
  15. package/dest/modules/instrumentation.js +17 -10
  16. package/dest/modules/l1_synchronizer.d.ts +1 -1
  17. package/dest/modules/l1_synchronizer.d.ts.map +1 -1
  18. package/dest/modules/l1_synchronizer.js +2 -3
  19. package/dest/store/block_store.d.ts +5 -5
  20. package/dest/store/block_store.d.ts.map +1 -1
  21. package/dest/store/block_store.js +2 -2
  22. package/dest/store/contract_class_store.d.ts +1 -1
  23. package/dest/store/contract_class_store.d.ts.map +1 -1
  24. package/dest/store/contract_class_store.js +11 -7
  25. package/dest/store/kv_archiver_store.d.ts +5 -5
  26. package/dest/store/kv_archiver_store.d.ts.map +1 -1
  27. package/dest/store/kv_archiver_store.js +2 -3
  28. package/dest/store/log_store.d.ts +1 -1
  29. package/dest/store/log_store.d.ts.map +1 -1
  30. package/dest/store/log_store.js +2 -2
  31. package/dest/test/index.js +3 -1
  32. package/dest/test/mock_l2_block_source.d.ts +6 -6
  33. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  34. package/dest/test/mock_l2_block_source.js +3 -3
  35. package/dest/test/mock_structs.d.ts +3 -2
  36. package/dest/test/mock_structs.d.ts.map +1 -1
  37. package/dest/test/mock_structs.js +7 -5
  38. package/dest/test/noop_l1_archiver.d.ts +23 -0
  39. package/dest/test/noop_l1_archiver.d.ts.map +1 -0
  40. package/dest/test/noop_l1_archiver.js +68 -0
  41. package/package.json +14 -13
  42. package/src/archiver.ts +22 -2
  43. package/src/factory.ts +3 -3
  44. package/src/l1/data_retrieval.ts +1 -1
  45. package/src/l1/validate_trace.ts +24 -6
  46. package/src/modules/data_source_base.ts +4 -4
  47. package/src/modules/instrumentation.ts +15 -10
  48. package/src/modules/l1_synchronizer.ts +2 -3
  49. package/src/store/block_store.ts +5 -5
  50. package/src/store/contract_class_store.ts +11 -7
  51. package/src/store/kv_archiver_store.ts +6 -6
  52. package/src/store/log_store.ts +5 -5
  53. package/src/test/index.ts +3 -0
  54. package/src/test/mock_l2_block_source.ts +6 -6
  55. package/src/test/mock_structs.ts +22 -6
  56. package/src/test/noop_l1_archiver.ts +109 -0
package/src/archiver.ts CHANGED
@@ -26,6 +26,7 @@ import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
26
26
  import {
27
27
  type L1RollupConstants,
28
28
  getEpochNumberAtTimestamp,
29
+ getSlotAtNextL1Block,
29
30
  getSlotAtTimestamp,
30
31
  getSlotRangeForEpoch,
31
32
  getTimestampRangeForEpoch,
@@ -68,7 +69,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
68
69
  public readonly events: ArchiverEmitter;
69
70
 
70
71
  /** A loop in which we will be continually fetching new checkpoints. */
71
- private runningPromise: RunningPromise;
72
+ protected runningPromise: RunningPromise;
72
73
 
73
74
  /** L1 synchronizer that handles fetching checkpoints and messages from L1. */
74
75
  private readonly synchronizer: ArchiverL1Synchronizer;
@@ -158,7 +159,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
158
159
 
159
160
  await this.blobClient.testSources();
160
161
  await this.synchronizer.testEthereumNodeSynced();
161
- await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
162
+ await validateAndLogTraceAvailability(
163
+ this.debugClient,
164
+ this.config.ethereumAllowNoDebugHosts ?? false,
165
+ this.log.getBindings(),
166
+ );
162
167
 
163
168
  // Log initial state for the archiver
164
169
  const { l1StartBlock } = this.l1Constants;
@@ -212,8 +217,23 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra
212
217
  const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
213
218
  this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
214
219
 
220
+ // Calculate slot threshold for validation
221
+ const l1Timestamp = this.synchronizer.getL1Timestamp();
222
+ const slotAtNextL1Block =
223
+ l1Timestamp === undefined ? undefined : getSlotAtNextL1Block(l1Timestamp, this.l1Constants);
224
+
215
225
  // Process each block individually to properly resolve/reject each promise
216
226
  for (const { block, resolve, reject } of queuedItems) {
227
+ const blockSlot = block.header.globalVariables.slotNumber;
228
+ if (slotAtNextL1Block !== undefined && blockSlot < slotAtNextL1Block) {
229
+ this.log.warn(
230
+ `Rejecting proposed block ${block.number} for past slot ${blockSlot} (current is ${slotAtNextL1Block})`,
231
+ { block: block.toBlockInfo(), l1Timestamp, slotAtNextL1Block },
232
+ );
233
+ reject(new Error(`Block ${block.number} is for past slot ${blockSlot} (current is ${slotAtNextL1Block})`));
234
+ continue;
235
+ }
236
+
217
237
  try {
218
238
  await this.updater.addProposedBlocks([block]);
219
239
  this.log.debug(`Added block ${block.number} to store`);
package/src/factory.ts CHANGED
@@ -6,7 +6,6 @@ import { BlockNumber } from '@aztec/foundation/branded-types';
6
6
  import { Buffer32 } from '@aztec/foundation/buffer';
7
7
  import { merge } from '@aztec/foundation/collection';
8
8
  import { Fr } from '@aztec/foundation/curves/bn254';
9
- import { createLogger } from '@aztec/foundation/log';
10
9
  import { DateProvider } from '@aztec/foundation/timer';
11
10
  import type { DataStoreConfig } from '@aztec/kv-store/config';
12
11
  import { createStore } from '@aztec/kv-store/lmdb-v2';
@@ -38,7 +37,7 @@ export async function createArchiverStore(
38
37
  ...userConfig,
39
38
  dataStoreMapSizeKb: userConfig.archiverStoreMapSizeKb ?? userConfig.dataStoreMapSizeKb,
40
39
  };
41
- const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config, createLogger('archiver:lmdb'));
40
+ const store = await createStore(ARCHIVER_STORE_NAME, ARCHIVER_DB_VERSION, config);
42
41
  return new KVArchiverDataStore(store, config.maxLogs, l1Constants);
43
42
  }
44
43
 
@@ -157,7 +156,8 @@ export async function createArchiver(
157
156
  return archiver;
158
157
  }
159
158
 
160
- async function registerProtocolContracts(store: KVArchiverDataStore) {
159
+ /** Registers protocol contracts in the archiver store. */
160
+ export async function registerProtocolContracts(store: KVArchiverDataStore) {
161
161
  const blockNumber = 0;
162
162
  for (const name of protocolContractNames) {
163
163
  const provider = new BundledProtocolContractsProvider();
@@ -100,7 +100,7 @@ export async function retrievedToPublishedCheckpoint({
100
100
  }),
101
101
  });
102
102
 
103
- const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);
103
+ const body = Body.fromTxBlobData(blockBlobData.txs);
104
104
 
105
105
  const blobFields = encodeBlockBlobData(blockBlobData);
106
106
  await spongeBlob.absorb(blobFields);
@@ -1,5 +1,5 @@
1
1
  import type { ViemPublicDebugClient } from '@aztec/ethereum/types';
2
- import { createLogger } from '@aztec/foundation/log';
2
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
3
3
 
4
4
  import type { Hex } from 'viem';
5
5
  import type { ZodSchema } from 'zod';
@@ -7,8 +7,6 @@ import type { ZodSchema } from 'zod';
7
7
  import { callTraceSchema } from './debug_tx.js';
8
8
  import { traceTransactionResponseSchema } from './trace_tx.js';
9
9
 
10
- const logger = createLogger('aztec:archiver:validate_trace');
11
-
12
10
  /**
13
11
  * Helper function to test a trace method with validation
14
12
  *
@@ -17,6 +15,7 @@ const logger = createLogger('aztec:archiver:validate_trace');
17
15
  * @param schema - Zod schema to validate the response
18
16
  * @param method - Name of the RPC method ('debug_traceTransaction' or 'trace_transaction')
19
17
  * @param blockType - Type of block being tested ('recent' or 'old')
18
+ * @param logger - Logger instance
20
19
  * @returns true if the method works and validation passes, false otherwise
21
20
  */
22
21
  async function testTraceMethod(
@@ -25,6 +24,7 @@ async function testTraceMethod(
25
24
  schema: ZodSchema,
26
25
  method: 'debug_traceTransaction' | 'trace_transaction',
27
26
  blockType: string,
27
+ logger: Logger,
28
28
  ): Promise<boolean> {
29
29
  try {
30
30
  // Make request with appropriate params based on method name
@@ -59,9 +59,14 @@ export interface TraceAvailability {
59
59
  * Validates the availability of debug/trace methods on the Ethereum client.
60
60
  *
61
61
  * @param client - The Viem public debug client
62
+ * @param bindings - Optional logger bindings for context
62
63
  * @returns Object indicating which trace methods are available for recent and old blocks
63
64
  */
64
- export async function validateTraceAvailability(client: ViemPublicDebugClient): Promise<TraceAvailability> {
65
+ export async function validateTraceAvailability(
66
+ client: ViemPublicDebugClient,
67
+ bindings?: LoggerBindings,
68
+ ): Promise<TraceAvailability> {
69
+ const logger = createLogger('archiver:validate_trace', bindings);
65
70
  const result: TraceAvailability = {
66
71
  debugTraceRecent: false,
67
72
  traceTransactionRecent: false,
@@ -95,6 +100,7 @@ export async function validateTraceAvailability(client: ViemPublicDebugClient):
95
100
  callTraceSchema,
96
101
  'debug_traceTransaction',
97
102
  'recent',
103
+ logger,
98
104
  );
99
105
 
100
106
  // Test trace_transaction with recent block
@@ -104,6 +110,7 @@ export async function validateTraceAvailability(client: ViemPublicDebugClient):
104
110
  traceTransactionResponseSchema,
105
111
  'trace_transaction',
106
112
  'recent',
113
+ logger,
107
114
  );
108
115
 
109
116
  // Get a block from 512 blocks ago
@@ -132,7 +139,14 @@ export async function validateTraceAvailability(client: ViemPublicDebugClient):
132
139
  const oldTxHash = oldBlock.transactions[0] as Hex;
133
140
 
134
141
  // Test debug_traceTransaction with old block
135
- result.debugTraceOld = await testTraceMethod(client, oldTxHash, callTraceSchema, 'debug_traceTransaction', 'old');
142
+ result.debugTraceOld = await testTraceMethod(
143
+ client,
144
+ oldTxHash,
145
+ callTraceSchema,
146
+ 'debug_traceTransaction',
147
+ 'old',
148
+ logger,
149
+ );
136
150
 
137
151
  // Test trace_transaction with old block
138
152
  result.traceTransactionOld = await testTraceMethod(
@@ -141,6 +155,7 @@ export async function validateTraceAvailability(client: ViemPublicDebugClient):
141
155
  traceTransactionResponseSchema,
142
156
  'trace_transaction',
143
157
  'old',
158
+ logger,
144
159
  );
145
160
  } catch (error) {
146
161
  logger.warn(`Error validating debug_traceTransaction and trace_transaction availability: ${error}`);
@@ -159,15 +174,18 @@ function hasTxs(block: { transactions?: Hex[] }): boolean {
159
174
  *
160
175
  * @param client - The Viem public debug client
161
176
  * @param ethereumAllowNoDebugHosts - If false, throws an error when no trace methods are available
177
+ * @param bindings - Optional logger bindings for context
162
178
  * @throws Error if ethereumAllowNoDebugHosts is false and no trace methods are available
163
179
  */
164
180
  export async function validateAndLogTraceAvailability(
165
181
  client: ViemPublicDebugClient,
166
182
  ethereumAllowNoDebugHosts: boolean,
183
+ bindings?: LoggerBindings,
167
184
  ): Promise<void> {
185
+ const logger = createLogger('archiver:validate_trace', bindings);
168
186
  logger.debug('Validating trace/debug method availability...');
169
187
 
170
- const availability = await validateTraceAvailability(client);
188
+ const availability = await validateTraceAvailability(client, bindings);
171
189
 
172
190
  // Check if we have support for old blocks (either debug or trace)
173
191
  const hasOldBlockSupport = availability.debugTraceOld || availability.traceTransactionOld;
@@ -4,7 +4,7 @@ import type { EthAddress } from '@aztec/foundation/eth-address';
4
4
  import { isDefined } from '@aztec/foundation/types';
5
5
  import type { FunctionSelector } from '@aztec/stdlib/abi';
6
6
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
7
- import { CheckpointedL2Block, CommitteeAttestation, L2Block, type L2Tips } from '@aztec/stdlib/block';
7
+ import { type BlockHash, CheckpointedL2Block, CommitteeAttestation, L2Block, type L2Tips } from '@aztec/stdlib/block';
8
8
  import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
9
9
  import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract';
10
10
  import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
@@ -121,7 +121,7 @@ export abstract class ArchiverDataSourceBase
121
121
  return this.store.getCheckpointedBlocks(from, limit);
122
122
  }
123
123
 
124
- public getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
124
+ public getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
125
125
  return this.store.getBlockHeaderByHash(blockHash);
126
126
  }
127
127
 
@@ -347,7 +347,7 @@ export abstract class ArchiverDataSourceBase
347
347
  return this.store.getBlocks(from, limit);
348
348
  }
349
349
 
350
- public getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
350
+ public getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
351
351
  return this.store.getCheckpointedBlockByHash(blockHash);
352
352
  }
353
353
 
@@ -355,7 +355,7 @@ export abstract class ArchiverDataSourceBase
355
355
  return this.store.getCheckpointedBlockByArchive(archive);
356
356
  }
357
357
 
358
- public async getL2BlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
358
+ public async getL2BlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
359
359
  const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
360
360
  return checkpointedBlock?.block;
361
361
  }
@@ -10,6 +10,7 @@ import {
10
10
  type TelemetryClient,
11
11
  type Tracer,
12
12
  type UpDownCounter,
13
+ createUpDownCounterWithDefault,
13
14
  } from '@aztec/telemetry-client';
14
15
 
15
16
  export class ArchiverInstrumentation {
@@ -48,15 +49,17 @@ export class ArchiverInstrumentation {
48
49
 
49
50
  this.l1BlockHeight = meter.createGauge(Metrics.ARCHIVER_L1_BLOCK_HEIGHT);
50
51
 
51
- this.txCount = meter.createUpDownCounter(Metrics.ARCHIVER_TOTAL_TXS);
52
+ this.txCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_TOTAL_TXS);
52
53
 
53
- this.proofsSubmittedCount = meter.createUpDownCounter(Metrics.ARCHIVER_ROLLUP_PROOF_COUNT);
54
+ this.proofsSubmittedCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_ROLLUP_PROOF_COUNT, {
55
+ [Attributes.PROOF_TIMED_OUT]: [true, false],
56
+ });
54
57
 
55
58
  this.proofsSubmittedDelay = meter.createHistogram(Metrics.ARCHIVER_ROLLUP_PROOF_DELAY);
56
59
 
57
60
  this.syncDurationPerBlock = meter.createHistogram(Metrics.ARCHIVER_SYNC_PER_BLOCK);
58
61
 
59
- this.syncBlockCount = meter.createUpDownCounter(Metrics.ARCHIVER_SYNC_BLOCK_COUNT);
62
+ this.syncBlockCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_SYNC_BLOCK_COUNT);
60
63
 
61
64
  this.manaPerBlock = meter.createHistogram(Metrics.ARCHIVER_MANA_PER_BLOCK);
62
65
 
@@ -64,13 +67,19 @@ export class ArchiverInstrumentation {
64
67
 
65
68
  this.syncDurationPerMessage = meter.createHistogram(Metrics.ARCHIVER_SYNC_PER_MESSAGE);
66
69
 
67
- this.syncMessageCount = meter.createUpDownCounter(Metrics.ARCHIVER_SYNC_MESSAGE_COUNT);
70
+ this.syncMessageCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_SYNC_MESSAGE_COUNT);
68
71
 
69
72
  this.pruneDuration = meter.createHistogram(Metrics.ARCHIVER_PRUNE_DURATION);
70
73
 
71
- this.pruneCount = meter.createUpDownCounter(Metrics.ARCHIVER_PRUNE_COUNT);
74
+ this.pruneCount = createUpDownCounterWithDefault(meter, Metrics.ARCHIVER_PRUNE_COUNT);
72
75
 
73
- this.blockProposalTxTargetCount = meter.createUpDownCounter(Metrics.ARCHIVER_BLOCK_PROPOSAL_TX_TARGET_COUNT);
76
+ this.blockProposalTxTargetCount = createUpDownCounterWithDefault(
77
+ meter,
78
+ Metrics.ARCHIVER_BLOCK_PROPOSAL_TX_TARGET_COUNT,
79
+ {
80
+ [Attributes.L1_BLOCK_PROPOSAL_USED_TRACE]: [true, false],
81
+ },
82
+ );
74
83
 
75
84
  this.dbMetrics = new LmdbMetrics(
76
85
  meter,
@@ -84,10 +93,6 @@ export class ArchiverInstrumentation {
84
93
  public static async new(telemetry: TelemetryClient, lmdbStats?: LmdbStatsCallback) {
85
94
  const instance = new ArchiverInstrumentation(telemetry, lmdbStats);
86
95
 
87
- instance.syncBlockCount.add(0);
88
- instance.syncMessageCount.add(0);
89
- instance.pruneCount.add(0);
90
-
91
96
  await instance.telemetry.flush();
92
97
 
93
98
  return instance;
@@ -16,7 +16,7 @@ import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
16
16
  import { isDefined } from '@aztec/foundation/types';
17
17
  import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResult } from '@aztec/stdlib/block';
18
18
  import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
19
- import { type L1RollupConstants, getEpochAtSlot, getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers';
19
+ import { type L1RollupConstants, getEpochAtSlot, getSlotAtNextL1Block } from '@aztec/stdlib/epoch-helpers';
20
20
  import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
21
21
  import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client';
22
22
 
@@ -249,8 +249,7 @@ export class ArchiverL1Synchronizer implements Traceable {
249
249
  const firstUncheckpointedBlockSlot = firstUncheckpointedBlockHeader?.getSlot();
250
250
 
251
251
  // What's the slot at the next L1 block? All blocks for slots strictly before this one should've been checkpointed by now.
252
- const nextL1BlockTimestamp = currentL1Timestamp + BigInt(this.l1Constants.ethereumSlotDuration);
253
- const slotAtNextL1Block = getSlotAtTimestamp(nextL1BlockTimestamp, this.l1Constants);
252
+ const slotAtNextL1Block = getSlotAtNextL1Block(currentL1Timestamp, this.l1Constants);
254
253
 
255
254
  // Prune provisional blocks from slots that have ended without being checkpointed
256
255
  if (firstUncheckpointedBlockSlot !== undefined && firstUncheckpointedBlockSlot < slotAtNextL1Block) {
@@ -9,11 +9,11 @@ import { isDefined } from '@aztec/foundation/types';
9
9
  import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncSingleton, Range } from '@aztec/kv-store';
10
10
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
11
11
  import {
12
+ BlockHash,
12
13
  Body,
13
14
  CheckpointedL2Block,
14
15
  CommitteeAttestation,
15
16
  L2Block,
16
- L2BlockHash,
17
17
  type ValidateCheckpointResult,
18
18
  deserializeValidateCheckpointResult,
19
19
  serializeValidateCheckpointResult,
@@ -351,7 +351,7 @@ export class BlockStore {
351
351
  }
352
352
 
353
353
  private async addBlockToDatabase(block: L2Block, checkpointNumber: number, indexWithinCheckpoint: number) {
354
- const blockHash = L2BlockHash.fromField(await block.hash());
354
+ const blockHash = await block.hash();
355
355
 
356
356
  await this.#blocks.set(block.number, {
357
357
  header: block.header.toBuffer(),
@@ -624,7 +624,7 @@ export class BlockStore {
624
624
  }
625
625
  }
626
626
 
627
- async getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
627
+ async getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
628
628
  const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString());
629
629
  if (blockNumber === undefined) {
630
630
  return undefined;
@@ -673,7 +673,7 @@ export class BlockStore {
673
673
  * @param blockHash - The hash of the block to return.
674
674
  * @returns The requested L2 block.
675
675
  */
676
- async getBlockByHash(blockHash: L2BlockHash): Promise<L2Block | undefined> {
676
+ async getBlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
677
677
  const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString());
678
678
  if (blockNumber === undefined) {
679
679
  return undefined;
@@ -699,7 +699,7 @@ export class BlockStore {
699
699
  * @param blockHash - The hash of the block to return.
700
700
  * @returns The requested block header.
701
701
  */
702
- async getBlockHeaderByHash(blockHash: L2BlockHash): Promise<BlockHeader | undefined> {
702
+ async getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
703
703
  const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString());
704
704
  if (blockNumber === undefined) {
705
705
  return undefined;
@@ -28,18 +28,22 @@ export class ContractClassStore {
28
28
  bytecodeCommitment: Fr,
29
29
  blockNumber: number,
30
30
  ): Promise<void> {
31
- await this.#contractClasses.setIfNotExists(
32
- contractClass.id.toString(),
33
- serializeContractClassPublic({ ...contractClass, l2BlockNumber: blockNumber }),
34
- );
35
- await this.#bytecodeCommitments.setIfNotExists(contractClass.id.toString(), bytecodeCommitment.toBuffer());
31
+ await this.db.transactionAsync(async () => {
32
+ await this.#contractClasses.setIfNotExists(
33
+ contractClass.id.toString(),
34
+ serializeContractClassPublic({ ...contractClass, l2BlockNumber: blockNumber }),
35
+ );
36
+ await this.#bytecodeCommitments.setIfNotExists(contractClass.id.toString(), bytecodeCommitment.toBuffer());
37
+ });
36
38
  }
37
39
 
38
40
  async deleteContractClasses(contractClass: ContractClassPublic, blockNumber: number): Promise<void> {
39
41
  const restoredContractClass = await this.#contractClasses.getAsync(contractClass.id.toString());
40
42
  if (restoredContractClass && deserializeContractClassPublic(restoredContractClass).l2BlockNumber >= blockNumber) {
41
- await this.#contractClasses.delete(contractClass.id.toString());
42
- await this.#bytecodeCommitments.delete(contractClass.id.toString());
43
+ await this.db.transactionAsync(async () => {
44
+ await this.#contractClasses.delete(contractClass.id.toString());
45
+ await this.#bytecodeCommitments.delete(contractClass.id.toString());
46
+ });
43
47
  }
44
48
  }
45
49
 
@@ -6,7 +6,7 @@ import { createLogger } from '@aztec/foundation/log';
6
6
  import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store';
7
7
  import { FunctionSelector } from '@aztec/stdlib/abi';
8
8
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
9
- import { CheckpointedL2Block, L2Block, L2BlockHash, type ValidateCheckpointResult } from '@aztec/stdlib/block';
9
+ import { BlockHash, CheckpointedL2Block, L2Block, type ValidateCheckpointResult } from '@aztec/stdlib/block';
10
10
  import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
11
11
  import type {
12
12
  ContractClassPublic,
@@ -291,7 +291,7 @@ export class KVArchiverDataStore implements ContractDataSource {
291
291
  * Returns the block for the given hash, or undefined if not exists.
292
292
  * @param blockHash - The block hash to return.
293
293
  */
294
- getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
294
+ getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
295
295
  return this.#blockStore.getCheckpointedBlockByHash(blockHash);
296
296
  }
297
297
  /**
@@ -312,8 +312,8 @@ export class KVArchiverDataStore implements ContractDataSource {
312
312
  * Returns the block for the given hash, or undefined if not exists.
313
313
  * @param blockHash - The block hash to return.
314
314
  */
315
- getBlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
316
- return this.#blockStore.getBlockByHash(L2BlockHash.fromField(blockHash));
315
+ getBlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
316
+ return this.#blockStore.getBlockByHash(blockHash);
317
317
  }
318
318
  /**
319
319
  * Returns the block for the given archive root, or undefined if not exists.
@@ -357,8 +357,8 @@ export class KVArchiverDataStore implements ContractDataSource {
357
357
  * Returns the block header for the given hash, or undefined if not exists.
358
358
  * @param blockHash - The block hash to return.
359
359
  */
360
- getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
361
- return this.#blockStore.getBlockHeaderByHash(L2BlockHash.fromField(blockHash));
360
+ getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
361
+ return this.#blockStore.getBlockHeaderByHash(blockHash);
362
362
  }
363
363
 
364
364
  /**
@@ -6,7 +6,7 @@ import { createLogger } from '@aztec/foundation/log';
6
6
  import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize';
7
7
  import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
8
8
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
9
- import { L2Block, L2BlockHash } from '@aztec/stdlib/block';
9
+ import { BlockHash, L2Block } from '@aztec/stdlib/block';
10
10
  import { MAX_LOGS_PER_TAG } from '@aztec/stdlib/interfaces/api-limit';
11
11
  import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client';
12
12
  import {
@@ -271,18 +271,18 @@ export class LogStore {
271
271
  });
272
272
  }
273
273
 
274
- #packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
274
+ #packWithBlockHash(blockHash: BlockHash, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
275
275
  return Buffer.concat([blockHash.toBuffer(), ...data]);
276
276
  }
277
277
 
278
- #unpackBlockHash(reader: BufferReader): L2BlockHash {
278
+ #unpackBlockHash(reader: BufferReader): BlockHash {
279
279
  const blockHash = reader.remainingBytes() > 0 ? reader.readObject(Fr) : undefined;
280
280
 
281
281
  if (!blockHash) {
282
282
  throw new Error('Failed to read block hash from log entry buffer');
283
283
  }
284
284
 
285
- return L2BlockHash.fromField(blockHash);
285
+ return new BlockHash(blockHash);
286
286
  }
287
287
 
288
288
  deleteLogs(blocks: L2Block[]): Promise<boolean> {
@@ -543,7 +543,7 @@ export class LogStore {
543
543
  #accumulateLogs(
544
544
  results: (ExtendedContractClassLog | ExtendedPublicLog)[],
545
545
  blockNumber: number,
546
- blockHash: L2BlockHash,
546
+ blockHash: BlockHash,
547
547
  txIndex: number,
548
548
  txLogs: (ContractClassLog | PublicLog)[],
549
549
  filter: LogFilter = {},
package/src/test/index.ts CHANGED
@@ -2,3 +2,6 @@ export * from './mock_structs.js';
2
2
  export * from './mock_l2_block_source.js';
3
3
  export * from './mock_l1_to_l2_message_source.js';
4
4
  export * from './mock_archiver.js';
5
+ // NOTE: noop_l1_archiver.js is intentionally NOT exported here because it imports
6
+ // jest-mock-extended, which depends on @jest/globals and can only run inside Jest.
7
+ // Import it directly: import { NoopL1Archiver } from '@aztec/archiver/test/noop-l1';
@@ -8,9 +8,9 @@ import { createLogger } from '@aztec/foundation/log';
8
8
  import type { FunctionSelector } from '@aztec/stdlib/abi';
9
9
  import type { AztecAddress } from '@aztec/stdlib/aztec-address';
10
10
  import {
11
+ BlockHash,
11
12
  CheckpointedL2Block,
12
13
  L2Block,
13
- L2BlockHash,
14
14
  type L2BlockSource,
15
15
  type L2Tips,
16
16
  type ValidateCheckpointResult,
@@ -195,7 +195,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
195
195
  return checkpoint;
196
196
  }
197
197
 
198
- public async getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
198
+ public async getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
199
199
  for (const block of this.l2Blocks) {
200
200
  const hash = await block.hash();
201
201
  if (hash.equals(blockHash)) {
@@ -225,7 +225,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
225
225
  );
226
226
  }
227
227
 
228
- public async getL2BlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
228
+ public async getL2BlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
229
229
  for (const block of this.l2Blocks) {
230
230
  const hash = await block.hash();
231
231
  if (hash.equals(blockHash)) {
@@ -240,7 +240,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
240
240
  return Promise.resolve(block);
241
241
  }
242
242
 
243
- public async getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
243
+ public async getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
244
244
  for (const block of this.l2Blocks) {
245
245
  const hash = await block.hash();
246
246
  if (hash.equals(blockHash)) {
@@ -322,7 +322,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
322
322
  return {
323
323
  data: txEffect,
324
324
  l2BlockNumber: block.number,
325
- l2BlockHash: L2BlockHash.fromField(await block.hash()),
325
+ l2BlockHash: await block.hash(),
326
326
  txIndexInBlock: block.body.txEffects.indexOf(txEffect),
327
327
  };
328
328
  }
@@ -343,7 +343,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
343
343
  TxExecutionResult.SUCCESS,
344
344
  undefined,
345
345
  txEffect.transactionFee.toBigInt(),
346
- L2BlockHash.fromField(await block.hash()),
346
+ await block.hash(),
347
347
  block.number,
348
348
  );
349
349
  }
@@ -46,24 +46,40 @@ export function makeInboxMessage(
46
46
  }
47
47
 
48
48
  export function makeInboxMessages(
49
- count: number,
49
+ totalCount: number,
50
50
  opts: {
51
51
  initialHash?: Buffer16;
52
52
  initialCheckpointNumber?: CheckpointNumber;
53
+ messagesPerCheckpoint?: number;
53
54
  overrideFn?: (msg: InboxMessage, index: number) => InboxMessage;
54
55
  } = {},
55
56
  ): InboxMessage[] {
56
- const { initialHash = Buffer16.ZERO, overrideFn = msg => msg, initialCheckpointNumber = 1 } = opts;
57
+ const {
58
+ initialHash = Buffer16.ZERO,
59
+ overrideFn = msg => msg,
60
+ initialCheckpointNumber = CheckpointNumber(1),
61
+ messagesPerCheckpoint = 1,
62
+ } = opts;
63
+
57
64
  const messages: InboxMessage[] = [];
58
65
  let rollingHash = initialHash;
59
- for (let i = 0; i < count; i++) {
66
+ for (let i = 0; i < totalCount; i++) {
67
+ const msgIndex = i % messagesPerCheckpoint;
68
+ const checkpointNumber = CheckpointNumber.fromBigInt(
69
+ BigInt(initialCheckpointNumber) + BigInt(i) / BigInt(messagesPerCheckpoint),
70
+ );
60
71
  const leaf = Fr.random();
61
- const checkpointNumber = CheckpointNumber(i + initialCheckpointNumber);
62
- const message = overrideFn(makeInboxMessage(rollingHash, { leaf, checkpointNumber }), i);
72
+ const message = overrideFn(
73
+ makeInboxMessage(rollingHash, {
74
+ leaf,
75
+ checkpointNumber,
76
+ index: InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(msgIndex),
77
+ }),
78
+ i,
79
+ );
63
80
  rollingHash = message.rollingHash;
64
81
  messages.push(message);
65
82
  }
66
-
67
83
  return messages;
68
84
  }
69
85