@aztec/stdlib 4.0.0-nightly.20260107 → 4.0.0-nightly.20260110

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/dest/abi/contract_artifact.d.ts +2 -2
  2. package/dest/abi/contract_artifact.d.ts.map +1 -1
  3. package/dest/abi/contract_artifact.js +1 -1
  4. package/dest/block/l2_block.d.ts +1 -1
  5. package/dest/block/l2_block.d.ts.map +1 -1
  6. package/dest/block/l2_block.js +4 -2
  7. package/dest/block/l2_block_code_to_purge.d.ts +2 -3
  8. package/dest/block/l2_block_code_to_purge.d.ts.map +1 -1
  9. package/dest/block/l2_block_code_to_purge.js +2 -8
  10. package/dest/block/l2_block_header.d.ts +8 -12
  11. package/dest/block/l2_block_header.d.ts.map +1 -1
  12. package/dest/block/l2_block_header.js +22 -17
  13. package/dest/interfaces/aztec-node.d.ts +10 -9
  14. package/dest/interfaces/aztec-node.d.ts.map +1 -1
  15. package/dest/interfaces/aztec-node.js +3 -3
  16. package/dest/interfaces/proving-job.d.ts +4 -4
  17. package/dest/interfaces/proving-job.d.ts.map +1 -1
  18. package/dest/interfaces/proving-job.js +1 -7
  19. package/dest/interfaces/server_circuit_prover.d.ts +4 -3
  20. package/dest/interfaces/server_circuit_prover.d.ts.map +1 -1
  21. package/dest/messaging/in_hash.d.ts +4 -2
  22. package/dest/messaging/in_hash.d.ts.map +1 -1
  23. package/dest/messaging/in_hash.js +3 -1
  24. package/dest/messaging/l2_to_l1_membership.d.ts +88 -6
  25. package/dest/messaging/l2_to_l1_membership.d.ts.map +1 -1
  26. package/dest/messaging/l2_to_l1_membership.js +158 -42
  27. package/dest/messaging/out_hash.d.ts +2 -1
  28. package/dest/messaging/out_hash.d.ts.map +1 -1
  29. package/dest/messaging/out_hash.js +13 -4
  30. package/dest/p2p/block_attestation.d.ts +3 -6
  31. package/dest/p2p/block_attestation.d.ts.map +1 -1
  32. package/dest/p2p/consensus_payload.d.ts +3 -6
  33. package/dest/p2p/consensus_payload.d.ts.map +1 -1
  34. package/dest/p2p/gossipable.d.ts +4 -3
  35. package/dest/p2p/gossipable.d.ts.map +1 -1
  36. package/dest/p2p/gossipable.js +16 -5
  37. package/dest/rollup/checkpoint_header.d.ts +11 -12
  38. package/dest/rollup/checkpoint_header.d.ts.map +1 -1
  39. package/dest/rollup/checkpoint_header.js +26 -18
  40. package/dest/rollup/checkpoint_rollup_public_inputs.d.ts +6 -1
  41. package/dest/rollup/checkpoint_rollup_public_inputs.d.ts.map +1 -1
  42. package/dest/rollup/checkpoint_rollup_public_inputs.js +6 -2
  43. package/dest/rollup/root_rollup_public_inputs.d.ts +8 -3
  44. package/dest/rollup/root_rollup_public_inputs.d.ts.map +1 -1
  45. package/dest/rollup/root_rollup_public_inputs.js +6 -3
  46. package/dest/tests/factories.d.ts +3 -7
  47. package/dest/tests/factories.d.ts.map +1 -1
  48. package/dest/tests/factories.js +6 -11
  49. package/dest/tests/mocks.d.ts +5 -2
  50. package/dest/tests/mocks.d.ts.map +1 -1
  51. package/dest/tests/mocks.js +5 -6
  52. package/dest/tx/global_variable_builder.d.ts +2 -2
  53. package/dest/tx/global_variable_builder.d.ts.map +1 -1
  54. package/dest/tx/index.d.ts +1 -2
  55. package/dest/tx/index.d.ts.map +1 -1
  56. package/dest/tx/index.js +0 -1
  57. package/dest/tx/tx.d.ts +1 -1
  58. package/dest/tx/tx.d.ts.map +1 -1
  59. package/dest/tx/tx.js +3 -1
  60. package/dest/tx/tx_execution_request.d.ts +1 -1
  61. package/dest/tx/tx_execution_request.d.ts.map +1 -1
  62. package/dest/tx/tx_execution_request.js +3 -1
  63. package/package.json +8 -8
  64. package/src/abi/contract_artifact.ts +10 -10
  65. package/src/block/l2_block.ts +3 -2
  66. package/src/block/l2_block_code_to_purge.ts +3 -11
  67. package/src/block/l2_block_header.ts +26 -17
  68. package/src/interfaces/aztec-node.ts +13 -10
  69. package/src/interfaces/proving-job.ts +2 -11
  70. package/src/interfaces/server_circuit_prover.ts +3 -2
  71. package/src/messaging/in_hash.ts +3 -1
  72. package/src/messaging/l2_to_l1_membership.ts +176 -52
  73. package/src/messaging/out_hash.ts +17 -7
  74. package/src/p2p/gossipable.ts +14 -4
  75. package/src/rollup/checkpoint_header.ts +30 -20
  76. package/src/rollup/checkpoint_rollup_public_inputs.ts +6 -0
  77. package/src/rollup/root_rollup_public_inputs.ts +6 -0
  78. package/src/tests/factories.ts +11 -15
  79. package/src/tests/mocks.ts +20 -14
  80. package/src/tx/global_variable_builder.ts +1 -1
  81. package/src/tx/index.ts +0 -1
  82. package/src/tx/tx.ts +2 -0
  83. package/src/tx/tx_execution_request.ts +2 -0
  84. package/dest/tx/content_commitment.d.ts +0 -49
  85. package/dest/tx/content_commitment.d.ts.map +0 -1
  86. package/dest/tx/content_commitment.js +0 -90
  87. package/src/tx/content_commitment.ts +0 -113
package/src/block/l2_block_header.ts
@@ -10,7 +10,7 @@ import { z } from 'zod';
 
 import { CheckpointHeader } from '../rollup/checkpoint_header.js';
 import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js';
-import { BlockHeader, ContentCommitment, GlobalVariables, StateReference } from '../tx/index.js';
+import { BlockHeader, GlobalVariables, StateReference } from '../tx/index.js';
 
 /**
  * TO BE DELETED
@@ -25,8 +25,8 @@ export class L2BlockHeader {
   constructor(
     /** Snapshot of archive before the block is applied. */
     public lastArchive: AppendOnlyTreeSnapshot,
-    /** Hash of the body of an L2 block. */
-    public contentCommitment: ContentCommitment,
+    public blobsHash: Fr,
+    public inHash: Fr,
     /** State reference. */
     public state: StateReference,
     /** Global variables of an L2 block. */
@@ -45,7 +45,8 @@ export class L2BlockHeader {
     return z
       .object({
        lastArchive: AppendOnlyTreeSnapshot.schema,
-       contentCommitment: ContentCommitment.schema,
+       blobsHash: schemas.Fr,
+       inHash: schemas.Fr,
        state: StateReference.schema,
        globalVariables: GlobalVariables.schema,
        totalFees: schemas.Fr,
@@ -59,7 +60,8 @@ export class L2BlockHeader {
   static getFields(fields: FieldsOf<L2BlockHeader>) {
     return [
      fields.lastArchive,
-     fields.contentCommitment,
+     fields.blobsHash,
+     fields.inHash,
      fields.state,
      fields.globalVariables,
      fields.totalFees,
@@ -84,7 +86,8 @@ export class L2BlockHeader {
   getSize() {
     return (
      this.lastArchive.getSize() +
-     this.contentCommitment.getSize() +
+     this.blobsHash.size +
+     this.inHash.size +
      this.state.getSize() +
      this.globalVariables.getSize() +
      this.totalFees.size +
@@ -111,7 +114,8 @@ export class L2BlockHeader {
 
     return new L2BlockHeader(
      reader.readObject(AppendOnlyTreeSnapshot),
-     reader.readObject(ContentCommitment),
+     reader.readObject(Fr),
+     reader.readObject(Fr),
      reader.readObject(StateReference),
      reader.readObject(GlobalVariables),
      reader.readObject(Fr),
@@ -126,7 +130,8 @@ export class L2BlockHeader {
 
     return new L2BlockHeader(
      AppendOnlyTreeSnapshot.fromFields(reader),
-     ContentCommitment.fromFields(reader),
+     reader.readField(),
+     reader.readField(),
      StateReference.fromFields(reader),
      GlobalVariables.fromFields(reader),
      reader.readField(),
@@ -139,7 +144,8 @@ export class L2BlockHeader {
   static empty(fields: Partial<FieldsOf<L2BlockHeader>> = {}) {
     return L2BlockHeader.from({
      lastArchive: AppendOnlyTreeSnapshot.empty(),
-     contentCommitment: ContentCommitment.empty(),
+     blobsHash: Fr.ZERO,
+     inHash: Fr.ZERO,
      state: StateReference.empty(),
      globalVariables: GlobalVariables.empty(),
      totalFees: Fr.ZERO,
@@ -153,7 +159,8 @@ export class L2BlockHeader {
   isEmpty(): boolean {
     return (
      this.lastArchive.isEmpty() &&
-     this.contentCommitment.isEmpty() &&
+     this.blobsHash.isZero() &&
+     this.inHash.isZero() &&
      this.state.isEmpty() &&
      this.globalVariables.isEmpty() &&
      this.totalFees.isZero() &&
@@ -179,7 +186,8 @@ export class L2BlockHeader {
     return new CheckpointHeader(
      this.lastArchive.root,
      this.blockHeadersHash,
-     this.contentCommitment,
+     this.blobsHash,
+     this.inHash,
      this.globalVariables.slotNumber,
      this.globalVariables.timestamp,
      this.globalVariables.coinbase,
@@ -203,7 +211,8 @@ export class L2BlockHeader {
   toInspect() {
     return {
      lastArchive: this.lastArchive.root.toString(),
-     contentCommitment: this.contentCommitment.toInspect(),
+     blobsHash: this.blobsHash.toString(),
+     inHash: this.inHash.toString(),
      state: this.state.toInspect(),
      globalVariables: this.globalVariables.toInspect(),
      totalFees: this.totalFees.toBigInt(),
@@ -216,9 +225,8 @@ export class L2BlockHeader {
   [inspect.custom]() {
     return `L2BlockHeader {
   lastArchive: ${inspect(this.lastArchive)},
-  contentCommitment.blobsHash: ${inspect(this.contentCommitment.blobsHash)},
-  contentCommitment.inHash: ${inspect(this.contentCommitment.inHash)},
-  contentCommitment.outHash: ${inspect(this.contentCommitment.outHash)},
+  blobsHash: ${inspect(this.blobsHash)},
+  inHash: ${inspect(this.inHash)},
   state.l1ToL2MessageTree: ${inspect(this.state.l1ToL2MessageTree)},
   state.noteHashTree: ${inspect(this.state.partial.noteHashTree)},
   state.nullifierTree: ${inspect(this.state.partial.nullifierTree)},
@@ -233,12 +241,13 @@ export class L2BlockHeader {
 
   public equals(other: this): boolean {
     return (
-     this.contentCommitment.equals(other.contentCommitment) &&
+     this.lastArchive.equals(other.lastArchive) &&
+     this.blobsHash.equals(other.blobsHash) &&
+     this.inHash.equals(other.inHash) &&
      this.state.equals(other.state) &&
      this.globalVariables.equals(other.globalVariables) &&
      this.totalFees.equals(other.totalFees) &&
      this.totalManaUsed.equals(other.totalManaUsed) &&
-     this.lastArchive.equals(other.lastArchive) &&
      this.spongeBlobHash.equals(other.spongeBlobHash) &&
      this.blockHeadersHash.equals(other.blockHeadersHash)
     );
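Taken together, these hunks flatten the header: L2BlockHeader drops the nested ContentCommitment and carries blobsHash and inHash directly as Fr fields, with no out hash on the header at all. A minimal caller-side sketch of the new shape, using stand-in types rather than the package's real imports:

// Stand-in types only; the real Fr and L2BlockHeader come from @aztec/foundation and @aztec/stdlib.
type FrLike = { toString(): string };
type HeaderAfter = { blobsHash: FrLike; inHash: FrLike };

function describeHeader(header: HeaderAfter): string {
  // Before this release: header.contentCommitment.blobsHash / .inHash / .outHash.
  // After: the two hashes are plain fields on the header; the out hash no longer lives here.
  return `blobsHash=${header.blobsHash.toString()}, inHash=${header.inHash.toString()}`;
}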
package/src/interfaces/aztec-node.ts
@@ -10,6 +10,8 @@ import {
   BlockNumber,
   BlockNumberPositiveSchema,
   BlockNumberSchema,
+  EpochNumber,
+  EpochNumberSchema,
   type SlotNumber,
 } from '@aztec/foundation/branded-types';
 import type { Fr } from '@aztec/foundation/curves/bn254';
@@ -216,11 +218,12 @@ export interface AztecNode
   isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise<boolean>;
 
   /**
-   * Returns all the L2 to L1 messages in a block.
-   * @param blockNumber - The block number at which to get the data.
-   * @returns The L2 to L1 messages (undefined if the block number is not found).
+   * Returns all the L2 to L1 messages in an epoch.
+   * @param epoch - The epoch at which to get the data.
+   * @returns A nested array of the L2 to L1 messages in each tx of each block in each checkpoint in the epoch (empty
+   * array if the epoch is not found).
    */
-  getL2ToL1Messages(blockNumber: BlockParameter): Promise<Fr[][] | undefined>;
+  getL2ToL1Messages(epoch: EpochNumber): Promise<Fr[][][][]>;
 
   /**
    * Get a block specified by its number.
@@ -277,10 +280,10 @@ export interface AztecNode
   getBlocks(from: BlockNumber, limit: number): Promise<L2Block[]>;
 
   /**
-   * Method to fetch the current base fees.
-   * @returns The current base fees.
+   * Method to fetch the current min fees.
+   * @returns The current min fees.
    */
-  getCurrentBaseFees(): Promise<GasFees>;
+  getCurrentMinFees(): Promise<GasFees>;
 
   /**
    * Method to fetch the current max priority fee of txs in the mempool.
@@ -552,8 +555,8 @@ export const AztecNodeApiSchema: ApiSchemaFor<AztecNode> = {
 
   getL2ToL1Messages: z
     .function()
-    .args(BlockParameterSchema)
-    .returns(z.array(z.array(schemas.Fr)).optional()),
+    .args(EpochNumberSchema)
+    .returns(z.array(z.array(z.array(z.array(schemas.Fr))))),
 
   getBlock: z.function().args(BlockParameterSchema).returns(L2Block.schema.optional()),
 
@@ -579,7 +582,7 @@ export const AztecNodeApiSchema: ApiSchemaFor<AztecNode> = {
     .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN))
     .returns(z.array(PublishedL2Block.schema)),
 
-  getCurrentBaseFees: z.function().returns(GasFees.schema),
+  getCurrentMinFees: z.function().returns(GasFees.schema),
 
   getMaxPriorityFees: z.function().returns(GasFees.schema),
 
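For node clients this is a breaking change on two fronts: getL2ToL1Messages is now epoch-scoped and returns a four-level nesting (checkpoint, then block, then tx, then message) instead of a per-block Fr[][], and getCurrentBaseFees is renamed to getCurrentMinFees. A minimal sketch of consuming the new return shape, assuming only the Fr import that already appears in this diff:

import type { Fr } from '@aztec/foundation/curves/bn254';

type MessagesInEpoch = Fr[][][][]; // checkpoints -> blocks -> txs -> messages

// Collect every message of the epoch in order; flat(3) strips the three outer levels.
function flattenEpochMessages(messagesInEpoch: MessagesInEpoch): Fr[] {
  return messagesInEpoch.flat(3);
}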
package/src/interfaces/proving-job.ts
@@ -47,15 +47,6 @@ export type ProofAndVerificationKey<N extends number> = {
   verificationKey: VerificationKeyData;
 };
 
-function schemaForRecursiveProofAndVerificationKey<N extends number>(
-  proofLength: N,
-): ZodFor<ProofAndVerificationKey<N>> {
-  return z.object({
-    proof: RecursiveProof.schemaFor(proofLength),
-    verificationKey: VerificationKeyData.schema,
-  });
-}
-
 export function makeProofAndVerificationKey<N extends number>(
   proof: RecursiveProof<N>,
   verificationKey: VerificationKeyData,
@@ -213,7 +204,7 @@ export type ProvingJobInputsMap = {
 export const ProvingJobResult = z.discriminatedUnion('type', [
   z.object({
     type: z.literal(ProvingRequestType.PUBLIC_VM),
-    result: schemaForRecursiveProofAndVerificationKey(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),
+    result: RecursiveProof.schemaFor(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),
   }),
   z.object({
     type: z.literal(ProvingRequestType.PUBLIC_CHONK_VERIFIER),
@@ -328,7 +319,7 @@
 ]);
 export type ProvingJobResult = z.infer<typeof ProvingJobResult>;
 export type ProvingJobResultsMap = {
-  [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;
+  [ProvingRequestType.PUBLIC_VM]: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;
   [ProvingRequestType.PUBLIC_CHONK_VERIFIER]: PublicInputsAndRecursiveProof<
     PublicChonkVerifierPublicInputs,
     typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
package/src/interfaces/server_circuit_prover.ts
@@ -9,6 +9,7 @@ import type { AvmCircuitInputs } from '../avm/avm.js';
 import type { ParityBasePrivateInputs } from '../parity/parity_base_private_inputs.js';
 import type { ParityPublicInputs } from '../parity/parity_public_inputs.js';
 import type { ParityRootPrivateInputs } from '../parity/parity_root_private_inputs.js';
+import type { RecursiveProof } from '../proofs/recursive_proof.js';
 import type { BlockMergeRollupPrivateInputs } from '../rollup/block_merge_rollup_private_inputs.js';
 import type { BlockRollupPublicInputs } from '../rollup/block_rollup_public_inputs.js';
 import type {
@@ -34,7 +35,7 @@ import type { RootRollupPublicInputs } from '../rollup/root_rollup_public_inputs
 import type { TxMergeRollupPrivateInputs } from '../rollup/tx_merge_rollup_private_inputs.js';
 import type { TxRollupPublicInputs } from '../rollup/tx_rollup_public_inputs.js';
 import type { Tx } from '../tx/tx.js';
-import type { ProofAndVerificationKey, PublicInputsAndRecursiveProof } from './proving-job.js';
+import type { PublicInputsAndRecursiveProof } from './proving-job.js';
 
 /**
  * Generates proofs for parity and rollup circuits.
@@ -188,7 +189,7 @@ export interface ServerCircuitProver {
     inputs: AvmCircuitInputs,
     signal?: AbortSignal,
     epochNumber?: number,
-  ): Promise<ProofAndVerificationKey<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>>;
+  ): Promise<RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>>;
 }
 
 export type IVCProofVerificationResult = {
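Both the PUBLIC_VM entry of the proving-job result map and ServerCircuitProver.getAvmProof drop the ProofAndVerificationKey wrapper: the promise now resolves to the RecursiveProof alone. A hedged caller-side sketch, with the interface reduced to the one method touched here and every other name a placeholder:

type AvmProver<Proof> = {
  getAvmProof(inputs: unknown, signal?: AbortSignal, epochNumber?: number): Promise<Proof>;
};

async function proveAvm<Proof>(prover: AvmProver<Proof>, inputs: unknown): Promise<Proof> {
  // Previously: const { proof, verificationKey } = await prover.getAvmProof(inputs);
  // Now the resolved value is the recursive proof itself, with no verification key attached.
  return prover.getAvmProof(inputs);
}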
package/src/messaging/in_hash.ts
@@ -3,7 +3,9 @@ import { padArrayEnd } from '@aztec/foundation/collection';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import { computeBalancedShaRoot } from '@aztec/foundation/trees';
 
-/** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
+/**
+ * Computes the inHash for a checkpoint (or the first block in a checkpoint) given its l1 to l2 messages.
+ */
 export function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Fr {
   const l1ToL2Messages = padArrayEnd<Fr, number>(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
   return new Fr(computeBalancedShaRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
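The helper's behavior is unchanged: it zero-pads the messages to NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP and takes their balanced SHA root; only the doc comment moves from block wording to checkpoint wording. A small usage sketch (the @aztec/stdlib/messaging import path is an assumption; inside the package the helper lives in src/messaging/in_hash.ts):

import { Fr } from '@aztec/foundation/curves/bn254';
import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; // assumed export path

// inHash for a checkpoint that received two L1-to-L2 messages (values are illustrative).
const inHash = computeInHashFromL1ToL2Messages([new Fr(1n), new Fr(2n)]);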
package/src/messaging/l2_to_l1_membership.ts
@@ -1,9 +1,99 @@
-import type { BlockNumber } from '@aztec/foundation/branded-types';
+import { AZTEC_MAX_EPOCH_DURATION } from '@aztec/constants';
+import type { EpochNumber } from '@aztec/foundation/branded-types';
 import { Fr } from '@aztec/foundation/curves/bn254';
 import { SiblingPath, UnbalancedMerkleTreeCalculator, computeUnbalancedShaRoot } from '@aztec/foundation/trees';
 
+/**
+ * # L2-to-L1 Message Tree Structure and Leaf IDs
+ *
+ * ## Overview
+ * L2-to-L1 messages are organized in a hierarchical 4-level tree structure within each epoch:
+ * Epoch → Checkpoints → Blocks → Transactions → Messages
+ *
+ * Each level uses an unbalanced Merkle tree, and some levels use compression (skipping zero hashes).
+ *
+ * ## Tree Levels
+ *
+ * 1. **Message Tree (TX Out Hash)**
+ *    - Leaves: Individual L2-to-L1 messages within a transaction
+ *    - Root: TX out hash
+ *    - Type: Unbalanced, non-compressed (the circuits ensure that all messages are not empty.)
+ *
+ * 2. **Block Tree**
+ *    - Leaves: TX out hashes from all transactions in a block
+ *    - Root: Block out hash
+ *    - Type: Unbalanced, compressed (zero hashes are skipped)
+ *    - Compression: If a tx has no messages (out hash = 0), that branch is ignored
+ *
+ * 3. **Checkpoint Tree**
+ *    - Leaves: Block out hashes from all blocks in a checkpoint
+ *    - Root: Checkpoint out hash
+ *    - Type: Unbalanced, compressed (zero hashes are skipped)
+ *    - Compression: If a block has no messages (out hash = 0), that branch is ignored
+ *
+ * 4. **Epoch Tree**
+ *    - Leaves: Checkpoint out hashes from all checkpoints in an epoch (padded to AZTEC_MAX_EPOCH_DURATION)
+ *    - Root: Epoch out hash (set in the root rollup's public inputs and inserted into the Outbox on L1 when the epoch is proven)
+ *    - Type: Unbalanced, non-compressed
+ *    - **Important**: Padded with zeros up to AZTEC_MAX_EPOCH_DURATION to allow for proofs of partial epochs
+ *
+ * ## Combined Membership Proof
+ * To prove a message exists in an epoch, we combine the sibling paths from all 4 trees:
+ *   [message siblings] + [tx siblings] + [block siblings] + [checkpoint siblings]
+ *
+ * ## Leaf ID: Stable Message Identification
+ *
+ * Each message gets a unique, stable **leaf ID** that identifies its position in the combined tree.
+ * The leaf ID is computed as:
+ *   leafId = 2^pathSize + leafIndex
+ *
+ * Where:
+ * - `pathSize`: Total length of the combined sibling path (from all 4 tree levels)
+ * - `leafIndex`: The message's index in a balanced tree representation at that height
+ *
+ * ### Why Leaf IDs Are Stable
+ *
+ * The leaf ID is based on the message's position in the tree structure, which is determined by:
+ * - The checkpoint index within the epoch
+ * - The block index within the checkpoint
+ * - The transaction index within the block
+ * - The message index within the transaction
+ *
+ * These indices are structural and do NOT depend on the total number of blocks/checkpoints in the epoch.
+ *
+ * ### Critical Property: Preserving Consumed Status
+ *
+ * **Problem**: On L1, epoch proofs can be submitted incrementally. For example:
+ * - First, a proof for checkpoints 1-10 of epoch 0 is submitted (proves the first 10 checkpoints)
+ * - Later, a proof for checkpoints 1-20 of epoch 0 is submitted (proves all 20 checkpoints)
+ *
+ * When the longer proof is submitted, it updates the epoch's out hash root on L1 to reflect the complete epoch (all 20
+ * checkpoints). However, some messages from checkpoints 1-10 may have already been consumed.
+ *
+ * **Solution**: The Outbox on L1 tracks consumed messages using a bitmap indexed by leaf ID.
+ * Because leaf IDs are stable (they don't change when more checkpoints are added to the epoch), messages that were consumed
+ * under the shorter proof remain marked as consumed under the longer proof.
+ *
+ * This prevents double-spending of L2-to-L1 messages when longer epoch proofs are submitted.
+ */
+
+/**
+ * Computes the unique leaf ID for an L2-to-L1 message.
+ *
+ * The leaf ID is stable across different epoch proof lengths and is used by the Outbox
+ * on L1 to track which messages have been consumed.
+ *
+ * @param membershipWitness - Contains the leafIndex and siblingPath for the message
+ * @returns The unique leaf ID used for tracking message consumption on L1
+ */
+export function getL2ToL1MessageLeafId(
+  membershipWitness: Pick<L2ToL1MembershipWitness, 'leafIndex' | 'siblingPath'>,
+): bigint {
+  return 2n ** BigInt(membershipWitness.siblingPath.pathSize) + membershipWitness.leafIndex;
+}
+
 export interface MessageRetrieval {
-  getL2ToL1Messages(l2BlockNumber: BlockNumber): Promise<Fr[][] | undefined>;
+  getL2ToL1Messages(epoch: EpochNumber): Promise<Fr[][][][]>;
 }
 
 export type L2ToL1MembershipWitness = {
@@ -14,76 +104,99 @@ export type L2ToL1MembershipWitness = {
 
 export async function computeL2ToL1MembershipWitness(
   messageRetriever: MessageRetrieval,
-  l2BlockNumber: BlockNumber,
+  epoch: EpochNumber,
   message: Fr,
 ): Promise<L2ToL1MembershipWitness | undefined> {
-  const messagesForAllTxs = await messageRetriever.getL2ToL1Messages(l2BlockNumber);
-  if (!messagesForAllTxs) {
+  const messagesInEpoch = await messageRetriever.getL2ToL1Messages(epoch);
+  if (messagesInEpoch.length === 0) {
     return undefined;
   }
 
-  return computeL2ToL1MembershipWitnessFromMessagesForAllTxs(messagesForAllTxs, message);
+  return computeL2ToL1MembershipWitnessFromMessagesInEpoch(messagesInEpoch, message);
 }
 
 // TODO: Allow to specify the message to consume by its index or by an offset, in case there are multiple messages with
 // the same value.
-export function computeL2ToL1MembershipWitnessFromMessagesForAllTxs(
-  messagesForAllTxs: Fr[][],
+export function computeL2ToL1MembershipWitnessFromMessagesInEpoch(
+  messagesInEpoch: Fr[][][][],
   message: Fr,
 ): L2ToL1MembershipWitness {
-  // Find index of message in subtree and index of tx in a block.
+  // Find the index of the message in the tx, index of the tx in the block, and index of the block in the epoch.
   let messageIndexInTx = -1;
-  const txIndex = messagesForAllTxs.findIndex(messages => {
-    messageIndexInTx = messages.findIndex(msg => msg.equals(message));
-    return messageIndexInTx !== -1;
+  let txIndex = -1;
+  let blockIndex = -1;
+  const checkpointIndex = messagesInEpoch.findIndex(messagesInCheckpoint => {
+    blockIndex = messagesInCheckpoint.findIndex(messagesInBlock => {
+      txIndex = messagesInBlock.findIndex(messagesInTx => {
+        messageIndexInTx = messagesInTx.findIndex(msg => msg.equals(message));
+        return messageIndexInTx !== -1;
+      });
+      return txIndex !== -1;
+    });
+    return blockIndex !== -1;
   });
 
-  if (txIndex === -1) {
+  if (checkpointIndex === -1) {
     throw new Error('The L2ToL1Message you are trying to prove inclusion of does not exist');
   }
 
-  // Get the txOutHash and the sibling path of the message in the tx subtree.
-  const txMessages = messagesForAllTxs[txIndex];
-  const txOutHashTree = UnbalancedMerkleTreeCalculator.create(txMessages.map(msg => msg.toBuffer()));
-  const txOutHash = txOutHashTree.getRoot();
-  const messagePathInSubtree = txOutHashTree.getSiblingPath(message.toBuffer());
-
-  // Calculate txOutHash for all txs.
-  const txSubtreeRoots = messagesForAllTxs.map((messages, i) => {
-    // For a tx with no messages, we have to set an out hash of 0 to match what the circuit does.
-    if (messages.length === 0) {
-      return Fr.ZERO;
-    }
+  // Build the tx tree.
+  const messagesInTx = messagesInEpoch[checkpointIndex][blockIndex][txIndex];
+  const txTree = UnbalancedMerkleTreeCalculator.create(messagesInTx.map(msg => msg.toBuffer()));
+  // Get the sibling path of the target message in the tx tree.
+  const pathToMessageInTxSubtree = txTree.getSiblingPathByLeafIndex(messageIndexInTx);
 
-    if (i === txIndex) {
-      return Fr.fromBuffer(txOutHash);
-    }
+  // Build the tree of the block containing the target message.
+  const blockTree = buildBlockTree(messagesInEpoch[checkpointIndex][blockIndex]);
+  // Get the sibling path of the tx out hash in the block tree.
+  const pathToTxOutHashInBlockTree = blockTree.getSiblingPathByLeafIndex(txIndex);
 
-    const root = computeUnbalancedShaRoot(messages.map(msg => msg.toBuffer()));
-    return Fr.fromBuffer(root);
-  });
+  // Build the tree of the checkpoint containing the target message.
+  const checkpointTree = buildCheckpointTree(messagesInEpoch[checkpointIndex]);
+  // Get the sibling path of the block out hash in the checkpoint tree.
+  const pathToBlockOutHashInCheckpointTree = checkpointTree.getSiblingPathByLeafIndex(blockIndex);
 
-  // Construct the top tree.
-  // The leaves of this tree are the `txOutHashes`.
-  // The root of this tree should match the `out_hash` calculated in the circuits. Zero hashes are compressed to reduce
-  // cost if the non-zero leaves result in a shorter path.
-  const valueToCompress = Buffer.alloc(32);
-  const topTree = UnbalancedMerkleTreeCalculator.create(
-    txSubtreeRoots.map(root => root.toBuffer()),
-    valueToCompress,
+  // Compute the out hashes of all checkpoints in the epoch.
+  let checkpointOutHashes = messagesInEpoch.map((messagesInCheckpoint, i) => {
+    if (i === checkpointIndex) {
+      return checkpointTree.getRoot();
+    }
+    return buildCheckpointTree(messagesInCheckpoint).getRoot();
+  });
+  // Pad to AZTEC_MAX_EPOCH_DURATION with zeros.
+  checkpointOutHashes = checkpointOutHashes.concat(
+    Array.from({ length: AZTEC_MAX_EPOCH_DURATION - messagesInEpoch.length }, () => Buffer.alloc(32)),
   );
-  const root = Fr.fromBuffer(topTree.getRoot());
 
-  // Compute the combined sibling path by appending the tx subtree path to the top tree path.
-  const txPathInTopTree = topTree.getSiblingPath(txOutHash);
-  const combinedPath = messagePathInSubtree.toBufferArray().concat(txPathInTopTree.toBufferArray());
+  // Build the epoch tree with all the checkpoint out hashes, including the padded zeros
+  const epochTree = UnbalancedMerkleTreeCalculator.create(checkpointOutHashes);
+  // Get the sibling path of the checkpoint out hash in the epoch tree.
+  const pathToCheckpointOutHashInEpochTree = epochTree.getSiblingPathByLeafIndex(checkpointIndex);
+
+  // The root of the epoch tree should match the `out_hash` in the root rollup's public inputs.
+  const root = Fr.fromBuffer(epochTree.getRoot());
+
+  // Compute the combined sibling path by appending the tx subtree path to the block tree path, then to the checkpoint
+  // tree path, then to the epoch tree path.
+  const combinedPath = pathToMessageInTxSubtree
+    .toBufferArray()
+    .concat(pathToTxOutHashInBlockTree.toBufferArray())
+    .concat(pathToBlockOutHashInCheckpointTree.toBufferArray())
+    .concat(pathToCheckpointOutHashInEpochTree.toBufferArray());
 
   // Compute the combined index.
-  // It is the index of the message in the balanced tree at its current height.
-  const txLeafIndexAtLevel = topTree.getLeafLocation(txIndex).index;
-  const messageLeafPosition = txOutHashTree.getLeafLocation(messageIndexInTx);
-  const numLeavesInLeftSubtrees = txLeafIndexAtLevel * (1 << messageLeafPosition.level);
-  const combinedIndex = numLeavesInLeftSubtrees + messageLeafPosition.index;
+  // It is the index of the message in the balanced tree (by filling up the wonky tree with empty nodes) at its current
+  // height. It's used to validate the membership proof.
+  const messageLeafPosition = txTree.getLeafLocation(messageIndexInTx);
+  const txLeafPosition = blockTree.getLeafLocation(txIndex);
+  const blockLeafPosition = checkpointTree.getLeafLocation(blockIndex);
+  const checkpointLeafPosition = epochTree.getLeafLocation(checkpointIndex);
+  const numLeavesInLeftCheckpoints = checkpointLeafPosition.index * (1 << blockLeafPosition.level);
+  const indexAtCheckpointLevel = numLeavesInLeftCheckpoints + blockLeafPosition.index;
+  const numLeavesInLeftBlocks = indexAtCheckpointLevel * (1 << txLeafPosition.level);
+  const indexAtTxLevel = numLeavesInLeftBlocks + txLeafPosition.index;
+  const numLeavesInLeftTxs = indexAtTxLevel * (1 << messageLeafPosition.level);
+  const combinedIndex = numLeavesInLeftTxs + messageLeafPosition.index;
 
   return {
     root,
@@ -92,8 +205,19 @@ export function computeL2ToL1MembershipWitnessFromMessagesForAllTxs(
   };
 }
 
-export function getL2ToL1MessageLeafId(
-  membershipWitness: Pick<L2ToL1MembershipWitness, 'leafIndex' | 'siblingPath'>,
-): bigint {
-  return 2n ** BigInt(membershipWitness.siblingPath.pathSize) + membershipWitness.leafIndex;
+function buildCheckpointTree(messagesInCheckpoint: Fr[][][]) {
+  const blockOutHashes = messagesInCheckpoint.map(messagesInBlock => buildBlockTree(messagesInBlock).getRoot());
+  return buildCompressedTree(blockOutHashes);
+}
+
+function buildBlockTree(messagesInBlock: Fr[][]) {
+  const txOutHashes = messagesInBlock.map(messages => computeUnbalancedShaRoot(messages.map(msg => msg.toBuffer())));
+  return buildCompressedTree(txOutHashes);
+}
+
+function buildCompressedTree(leaves: Buffer[]) {
+  // Note: If a block or tx has no messages (i.e. leaf == Buffer.alloc(32)), we ignore that branch and only accumulate
+  // the non-zero hashes to match what the circuits do.
+  const valueToCompress = Buffer.alloc(32);
+  return UnbalancedMerkleTreeCalculator.create(leaves, valueToCompress);
 }
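A worked instance of the leaf-ID formula documented above, leafId = 2^pathSize + leafIndex. The numbers are illustrative; in real use the witness comes from computeL2ToL1MembershipWitness and the ID from getL2ToL1MessageLeafId:

// A message whose combined sibling path has 7 nodes and whose combined index is 5:
const pathSize = 7;
const leafIndex = 5n;
const leafId = 2n ** BigInt(pathSize) + leafIndex; // 128n + 5n = 133n

// Because the epoch tree is always padded to AZTEC_MAX_EPOCH_DURATION leaves, neither pathSize
// nor leafIndex changes when more checkpoints of the epoch are later proven, so the Outbox can
// keep using 133 to mark this message as consumed.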
package/src/messaging/out_hash.ts
@@ -1,5 +1,7 @@
+import { AZTEC_MAX_EPOCH_DURATION } from '@aztec/constants';
+import { padArrayEnd } from '@aztec/foundation/collection';
 import { Fr } from '@aztec/foundation/curves/bn254';
-import { UnbalancedMerkleTreeCalculator, computeUnbalancedShaRoot } from '@aztec/foundation/trees';
+import { computeCompressedUnbalancedShaRoot, computeUnbalancedShaRoot } from '@aztec/foundation/trees';
 
 export function computeTxOutHash(messages: Fr[]): Fr {
   if (!messages.length) {
@@ -20,6 +22,19 @@ export function computeCheckpointOutHash(messagesForAllTxs: Fr[][][]): Fr {
   return aggregateOutHashes(blockOutHashes);
 }
 
+export function computeEpochOutHash(messagesInEpoch: Fr[][][][]): Fr {
+  // Must match the implementation in `compute_epoch_out_hash.nr`.
+  const checkpointOutHashes = messagesInEpoch
+    .map(checkpoint => computeCheckpointOutHash(checkpoint))
+    .map(hash => hash.toBuffer());
+  if (checkpointOutHashes.every(hash => hash.equals(Buffer.alloc(32)))) {
+    return Fr.ZERO;
+  }
+
+  const paddedOutHashes = padArrayEnd(checkpointOutHashes, Buffer.alloc(32), AZTEC_MAX_EPOCH_DURATION);
+  return Fr.fromBuffer(computeUnbalancedShaRoot(paddedOutHashes));
+}
+
 // The root of this tree should match the `out_hash` calculated in the circuits. Zero hashes are compressed to reduce
 // cost if the non-zero leaves result in a shorter path.
 function aggregateOutHashes(outHashes: Fr[]): Fr {
@@ -27,10 +42,5 @@ function aggregateOutHashes(outHashes: Fr[]): Fr {
     return Fr.ZERO;
   }
 
-  const valueToCompress = Buffer.alloc(32);
-  const tree = UnbalancedMerkleTreeCalculator.create(
-    outHashes.map(hash => hash.toBuffer()),
-    valueToCompress,
-  );
-  return Fr.fromBuffer(tree.getRoot());
+  return Fr.fromBuffer(computeCompressedUnbalancedShaRoot(outHashes.map(hash => hash.toBuffer())));
 }
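The new computeEpochOutHash mirrors the epoch tree described in l2_to_l1_membership.ts: per-checkpoint out hashes are zero-padded to AZTEC_MAX_EPOCH_DURATION and rolled up with an unbalanced SHA root, with Fr.ZERO returned when the epoch carries no messages at all. A usage sketch; the @aztec/stdlib/messaging import path is an assumption:

import { Fr } from '@aztec/foundation/curves/bn254';
import { computeEpochOutHash } from '@aztec/stdlib/messaging'; // assumed export path

// epoch -> checkpoints -> blocks -> txs -> messages (values are illustrative)
const messagesInEpoch: Fr[][][][] = [
  // checkpoint 0
  [
    // block 0
    [
      [new Fr(1n), new Fr(2n)], // tx 0: two L2-to-L1 messages
      [], // tx 1: no messages; its zero out hash is compressed away
    ],
  ],
];

const epochOutHash = computeEpochOutHash(messagesInEpoch);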
package/src/p2p/gossipable.ts
@@ -7,23 +7,33 @@ export class P2PMessage {
   constructor(
     public readonly payload: Buffer,
     public readonly timestamp?: Date,
+    public readonly traceContext?: string,
   ) {}
 
-  static fromGossipable(message: Gossipable, instrumentMessages = false): P2PMessage {
-    return new P2PMessage(message.toBuffer(), instrumentMessages ? new Date() : undefined);
+  static fromGossipable(message: Gossipable, instrumentMessages = false, traceContext?: string): P2PMessage {
+    if (!instrumentMessages) {
+      return new P2PMessage(message.toBuffer());
+    }
+    return new P2PMessage(message.toBuffer(), new Date(), traceContext);
   }
 
   static fromMessageData(messageData: Buffer, instrumentMessages = false): P2PMessage {
     const reader = new BufferReader(messageData);
-    const timestamp = instrumentMessages ? new Date(Number(reader.readUInt64())) : undefined;
+    let timestamp: Date | undefined;
+    let traceContext: string | undefined;
+    if (instrumentMessages) {
+      timestamp = new Date(Number(reader.readUInt64()));
+      traceContext = reader.readString();
+    }
     const payload = reader.readBuffer();
-    return new P2PMessage(payload, timestamp);
+    return new P2PMessage(payload, timestamp, traceContext);
   }
 
   toMessageData(): Buffer {
     const arr: Buffer[] = [];
     if (this.timestamp) {
       arr.push(bigintToUInt64BE(BigInt(this.timestamp.getTime())));
+      arr.push(serializeToBuffer(this.traceContext ?? ''));
     }
     arr.push(serializeToBuffer(this.payload.length, this.payload));
     return serializeToBuffer(arr);
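With instrumentation enabled, the P2P wire format now carries a trace-context string between the timestamp and the payload, and both the constructor and fromGossipable accept it. A round-trip sketch; the @aztec/stdlib/p2p import path is an assumption:

import { P2PMessage } from '@aztec/stdlib/p2p'; // assumed export path

const original = new P2PMessage(Buffer.from('payload'), new Date(), 'traceparent-value');
const wire = original.toMessageData();

// fromMessageData must be told the message was instrumented, otherwise the timestamp and
// trace-context bytes would be misread as part of the payload.
const decoded = P2PMessage.fromMessageData(wire, true);
console.log(decoded.timestamp?.toISOString(), decoded.traceContext);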