@aztec/end-to-end 0.0.1-commit.f1df4d2 → 0.0.1-commit.f2ce05ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dest/bench/client_flows/config.d.ts +2 -2
  2. package/dest/bench/client_flows/config.d.ts.map +1 -1
  3. package/dest/bench/client_flows/config.js +18 -0
  4. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts +1 -1
  5. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts.map +1 -1
  6. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.js +3 -2
  7. package/dest/e2e_epochs/epochs_test.d.ts +1 -1
  8. package/dest/e2e_epochs/epochs_test.d.ts.map +1 -1
  9. package/dest/e2e_epochs/epochs_test.js +2 -1
  10. package/dest/e2e_p2p/shared.d.ts +1 -1
  11. package/dest/e2e_p2p/shared.d.ts.map +1 -1
  12. package/dest/e2e_p2p/shared.js +3 -0
  13. package/dest/e2e_storage_proof/fixtures/storage_proof_fetcher.d.ts +2 -0
  14. package/dest/e2e_storage_proof/fixtures/storage_proof_fetcher.d.ts.map +1 -0
  15. package/dest/e2e_storage_proof/fixtures/storage_proof_fetcher.js +184 -0
  16. package/dest/e2e_storage_proof/fixtures/storage_proof_fixture.d.ts +18 -0
  17. package/dest/e2e_storage_proof/fixtures/storage_proof_fixture.d.ts.map +1 -0
  18. package/dest/e2e_storage_proof/fixtures/storage_proof_fixture.js +120 -0
  19. package/dest/fixtures/setup_p2p_test.d.ts +10 -5
  20. package/dest/fixtures/setup_p2p_test.d.ts.map +1 -1
  21. package/dest/fixtures/setup_p2p_test.js +6 -3
  22. package/dest/shared/uniswap_l1_l2.d.ts +1 -1
  23. package/dest/shared/uniswap_l1_l2.d.ts.map +1 -1
  24. package/dest/shared/uniswap_l1_l2.js +7 -5
  25. package/package.json +39 -39
  26. package/src/bench/client_flows/config.ts +9 -1
  27. package/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +3 -4
  28. package/src/e2e_epochs/epochs_test.ts +1 -0
  29. package/src/e2e_p2p/shared.ts +1 -0
  30. package/src/e2e_storage_proof/fixtures/storage_proof.json +915 -0
  31. package/src/e2e_storage_proof/fixtures/storage_proof_fetcher.ts +190 -0
  32. package/src/e2e_storage_proof/fixtures/storage_proof_fixture.ts +173 -0
  33. package/src/fixtures/setup_p2p_test.ts +16 -7
  34. package/src/shared/uniswap_l1_l2.ts +7 -9
@@ -0,0 +1,190 @@
1
+ /**
2
+ * Fetches an ERC20 balance storage proof from the Ethereum mainnet and saves it to a Prover.toml compatible JSON.
3
+ * The JSON can be converted to toml for use with nargo, or used directly as a JSON file in
4
+ * Aztec contracts. This script is not using any Aztec library code, so it's easily portable.
5
+ */
6
+ import fs from 'fs';
7
+ import { dirname, join } from 'path';
8
+ import { fileURLToPath } from 'url';
9
+ import { createPublicClient, encodeAbiParameters, fromRlp, hexToBytes, http, keccak256 } from 'viem';
10
+ import { mainnet } from 'viem/chains';
11
+
12
+ const __dirname = dirname(fileURLToPath(import.meta.url));
13
+
14
+ const RPC_URL = process.env.RPC_URL;
15
+ const ERC20_CONTRACT = (process.env.ERC20_CONTRACT || '0xdAC17F958D2ee523a2206206994597C13D831ec7') as `0x${string}`;
16
+ const HOLDER = (process.env.HOLDER || '0x23878914EFE38d27C4D67Ab83ed1b93A74D4086a') as `0x${string}`;
17
+ const SLOT = BigInt(process.env.SLOT || '2');
18
+ const BLOCK_TAG = process.env.BLOCK_NUMBER ? BigInt(process.env.BLOCK_NUMBER) : 'latest';
19
+ const MAX_ACCOUNT_PATH = 15;
20
+ const MAX_STORAGE_PATH = 10;
21
+
22
+ function padTo(arr: number[], len: number) {
23
+ return [...arr, ...Array(len - arr.length).fill(0)].slice(0, len);
24
+ }
25
+
26
+ function toBytes(hex: `0x${string}`) {
27
+ return Array.from(hexToBytes(hex));
28
+ }
29
+
30
+ function bytesToU64s(bytes: number[]) {
31
+ const paddedBytes = padTo(bytes, 32);
32
+ return Array.from({ length: 4 }, (_, i) => {
33
+ let val = 0n;
34
+ for (let j = 0; j < 8; j++) {
35
+ val += BigInt(paddedBytes[i * 8 + j]) << BigInt(j * 8);
36
+ }
37
+ return val.toString();
38
+ });
39
+ }
40
+
41
+ function toBytesAndLen(val: bigint | number) {
42
+ if (val === 0n || val === 0) {
43
+ return { bytes: [0], length: 0 };
44
+ }
45
+ let hex = val.toString(16);
46
+ if (hex.length % 2) {
47
+ hex = '0' + hex;
48
+ }
49
+ const bytes = toBytes(`0x${hex}`);
50
+ return { bytes, length: bytes.length };
51
+ }
52
+
53
+ function parseNode(rlp: `0x${string}`) {
54
+ // Should be safe when working with branches and extensions without embedded children.
55
+ const decoded = fromRlp(rlp) as `0x${string}`[];
56
+ const node = {
57
+ rows: Array(16)
58
+ .fill(0)
59
+ .map(() => Array(32).fill(0)),
60
+ row_exist: Array(16).fill(false),
61
+ node_type: 0,
62
+ };
63
+
64
+ if (decoded.length === 17) {
65
+ for (let i = 0; i < 16; i++) {
66
+ if (decoded[i] !== '0x') {
67
+ node.row_exist[i] = true;
68
+ node.rows[i] = padTo(toBytes(decoded[i]), 32);
69
+ }
70
+ }
71
+ } else if (decoded.length === 2) {
72
+ const keyBytes = toBytes(decoded[0]);
73
+ const prefix = keyBytes[0];
74
+ if (prefix >> 4 >= 2) {
75
+ throw new Error('Unsupported: leaf node in proof path');
76
+ }
77
+ node.node_type = 1;
78
+ // Extension header format expected by the noir code: check out storage_proof types.nr.
79
+ node.rows[0][0] = prefix >> 4;
80
+ node.rows[0][8] = prefix & 0x0f;
81
+ node.rows[0][16] = keyBytes.length - 1;
82
+
83
+ for (let i = 1; i < keyBytes.length && i < 32; i++) {
84
+ node.rows[1][i - 1] = keyBytes[i];
85
+ }
86
+ node.rows[2] = padTo(toBytes(decoded[1]), 32);
87
+ node.row_exist[0] = node.row_exist[1] = node.row_exist[2] = true;
88
+ }
89
+ return node;
90
+ }
91
+
92
+ function parseProof(proof: `0x${string}`[], maxLen: number) {
93
+ const nodes = proof.slice(0, -1).slice(0, maxLen).map(parseNode);
94
+ while (nodes.length < maxLen) {
95
+ nodes.push({
96
+ rows: Array(16)
97
+ .fill(0)
98
+ .map(() => Array(32).fill(0)),
99
+ row_exist: Array(16).fill(false),
100
+ node_type: 0,
101
+ });
102
+ }
103
+ return nodes;
104
+ }
105
+
106
+ function nodeToLibFormat(node: { rows: number[][]; row_exist: boolean[]; node_type: number }) {
107
+ return {
108
+ rows: node.rows.map(bytesToU64s),
109
+ row_exist: node.row_exist,
110
+ node_type: String(node.node_type),
111
+ };
112
+ }
113
+
114
+ async function main() {
115
+ if (!RPC_URL) {
116
+ throw new Error('RPC_URL is not set');
117
+ }
118
+ const storageKey = keccak256(encodeAbiParameters([{ type: 'address' }, { type: 'uint256' }], [HOLDER, SLOT]));
119
+ console.log(`Fetching storage proof for ${ERC20_CONTRACT}, holder ${HOLDER}, slot ${SLOT}`);
120
+ console.log(`Storage key: ${storageKey}`);
121
+
122
+ const client = createPublicClient({
123
+ chain: mainnet,
124
+ transport: http(RPC_URL),
125
+ });
126
+
127
+ const [blockNumber, proof, block] = await Promise.all([
128
+ client.getBlockNumber(),
129
+ client.getProof({
130
+ address: ERC20_CONTRACT,
131
+ storageKeys: [storageKey],
132
+ blockNumber: BLOCK_TAG === 'latest' ? undefined : BLOCK_TAG,
133
+ }),
134
+ client.getBlock({
135
+ blockNumber: BLOCK_TAG === 'latest' ? undefined : BLOCK_TAG,
136
+ }),
137
+ ]);
138
+
139
+ const storageProof = proof.storageProof[0];
140
+ console.log(
141
+ `Block: ${blockNumber}, Account nodes: ${proof.accountProof.length}, Storage nodes: ${storageProof.proof.length}`,
142
+ );
143
+ console.log(`Value: ${storageProof.value}`);
144
+
145
+ // The -1 is because the last node in the proof is the leaf, which is excluded from path verification.
146
+ const accountPathLen = proof.accountProof.length - 1;
147
+ const storagePathLen = storageProof.proof.length - 1;
148
+ if (accountPathLen > MAX_ACCOUNT_PATH) {
149
+ throw new Error(
150
+ `Account proof path length ${accountPathLen} exceeds MAX_ACCOUNT_PATH ${MAX_ACCOUNT_PATH}. Increase the limit.`,
151
+ );
152
+ }
153
+ if (storagePathLen > MAX_STORAGE_PATH) {
154
+ throw new Error(
155
+ `Storage proof path length ${storagePathLen} exceeds MAX_STORAGE_PATH ${MAX_STORAGE_PATH}. Increase the limit.`,
156
+ );
157
+ }
158
+
159
+ const nonce = toBytesAndLen(proof.nonce);
160
+ const balance = toBytesAndLen(proof.balance);
161
+ const slotValue = toBytesAndLen(storageProof.value);
162
+
163
+ const data = {
164
+ account_nodes: parseProof(proof.accountProof, MAX_ACCOUNT_PATH).map(nodeToLibFormat),
165
+ account_node_length: String(accountPathLen),
166
+ storage_nodes: parseProof(storageProof.proof, MAX_STORAGE_PATH).map(nodeToLibFormat),
167
+ storage_node_length: String(storagePathLen),
168
+ account: {
169
+ nonce: padTo(nonce.bytes, 8).map(String),
170
+ nonce_length: String(nonce.length),
171
+ balance: padTo(balance.bytes, 32).map(String),
172
+ balance_length: String(balance.length),
173
+ address: toBytes(ERC20_CONTRACT).map(String),
174
+ storage_hash: bytesToU64s(toBytes(proof.storageHash)),
175
+ code_hash: bytesToU64s(toBytes(proof.codeHash)),
176
+ },
177
+ slot: {
178
+ value: padTo(slotValue.bytes, 32).map(String),
179
+ value_length: String(slotValue.length),
180
+ },
181
+ slot_key: toBytes(storageKey).map(String),
182
+ root: bytesToU64s(toBytes(block.stateRoot)),
183
+ block_number: String(blockNumber),
184
+ };
185
+
186
+ fs.writeFileSync(join(__dirname, 'storage_proof.json'), JSON.stringify(data, null, 2));
187
+ console.log('storage_proof.json generated');
188
+ }
189
+
190
+ main().catch(console.error);
@@ -0,0 +1,173 @@
1
+ import type { AztecAddress } from '@aztec/aztec.js/addresses';
2
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
4
+ import { EthAddress } from '@aztec/foundation/eth-address';
5
+ import { Capsule } from '@aztec/stdlib/tx';
6
+
7
+ import { readFileSync } from 'fs';
8
+ import { dirname, join } from 'path';
9
+ import { fileURLToPath } from 'url';
10
+
11
+ const __dirname = dirname(fileURLToPath(import.meta.url));
12
+ const FIXTURE_PATH = join(__dirname, './storage_proof.json');
13
+
14
+ // Constants matching the Noir contract
15
+ const ACCOUNT_CAPSULE_KEY_SEPARATOR = 100;
16
+ const ACCOUNT_PROOF_CAPSULE_KEY_SEPARATOR = 101;
17
+ const STORAGE_PROOF_CAPSULE_KEY_SEPARATOR = 102;
18
+ const STORAGE_PROOF_NODE_CAPSULE_KEY_SEPARATOR = 103;
19
+ const MAX_ACCOUNT_PROOF_LENGTH = 15;
20
+ /** Node: rows [[u64;4];16] (64) + row_exist [bool;16] (16) + node_type u8 (1) = 81 fields */
21
+ const NODE_FIELD_COUNT = 81;
22
+
23
+ // --- JSON fixture types ---
24
+
25
+ type JsonNode = { rows: string[][]; row_exist: boolean[]; node_type: string };
26
+
27
+ type JsonAccount = {
28
+ nonce: string[];
29
+ balance: string[];
30
+ address: string[];
31
+ nonce_length: string;
32
+ balance_length: string;
33
+ storage_hash: string[];
34
+ code_hash: string[];
35
+ };
36
+
37
+ type StorageProofJSON = {
38
+ root: string[];
39
+ slot_key: string[];
40
+ account_node_length: string;
41
+ storage_node_length: string;
42
+ account_nodes: JsonNode[];
43
+ storage_nodes: JsonNode[];
44
+ account: JsonAccount;
45
+ slot: { value: string[]; value_length: string };
46
+ };
47
+
48
+ // --- Serialization helpers (Noir struct Serialize layout) ---
49
+
50
+ function serializeNode(node: JsonNode): Fr[] {
51
+ const fields: Fr[] = [];
52
+ for (const row of node.rows) {
53
+ for (const val of row) {
54
+ fields.push(new Fr(BigInt(val)));
55
+ }
56
+ }
57
+ for (const exists of node.row_exist) {
58
+ fields.push(new Fr(exists ? 1n : 0n));
59
+ }
60
+ fields.push(new Fr(BigInt(node.node_type)));
61
+ return fields;
62
+ }
63
+
64
+ /** Account: nonce [u8;8] + balance [u8;32] + address [u8;20] + nonce_length u8 + balance_length u8 + storage_hash [u64;4] + code_hash [u64;4] = 70 fields */
65
+ function serializeAccount(account: JsonAccount): Fr[] {
66
+ const fields: Fr[] = [];
67
+ for (const v of account.nonce) {
68
+ fields.push(new Fr(BigInt(v)));
69
+ }
70
+ for (const v of account.balance) {
71
+ fields.push(new Fr(BigInt(v)));
72
+ }
73
+ for (const v of account.address) {
74
+ fields.push(new Fr(BigInt(v)));
75
+ }
76
+ fields.push(new Fr(BigInt(account.nonce_length)));
77
+ fields.push(new Fr(BigInt(account.balance_length)));
78
+ for (const v of account.storage_hash) {
79
+ fields.push(new Fr(BigInt(v)));
80
+ }
81
+ for (const v of account.code_hash) {
82
+ fields.push(new Fr(BigInt(v)));
83
+ }
84
+ return fields;
85
+ }
86
+
87
+ function zeroNode(): Fr[] {
88
+ return Array(NODE_FIELD_COUNT).fill(Fr.ZERO);
89
+ }
90
+
91
+ // --- Public API ---
92
+
93
+ /** Parsed + typed fixture data ready for use as contract function arguments. */
94
+ export type StorageProofArgs = {
95
+ ethAddress: EthAddress;
96
+ slotKey: number[];
97
+ slotContents: { value: number[]; value_length: number };
98
+ root: bigint[];
99
+ };
100
+
101
+ /** Loads the storage proof fixture from disk and returns the contract args. */
102
+ export function loadStorageProofArgs(): StorageProofArgs {
103
+ const fixture: StorageProofJSON = JSON.parse(readFileSync(FIXTURE_PATH, 'utf8'));
104
+ const addressBytes = Buffer.from(fixture.account.address.map(v => Number(v)));
105
+ return {
106
+ ethAddress: EthAddress.fromString('0x' + addressBytes.toString('hex')),
107
+ slotKey: fixture.slot_key.map(v => Number(v)),
108
+ slotContents: {
109
+ value: fixture.slot.value.map(v => Number(v)),
110
+ // eslint-disable-next-line camelcase
111
+ value_length: Number(fixture.slot.value_length),
112
+ },
113
+ root: fixture.root.map(v => BigInt(v)),
114
+ };
115
+ }
116
+
117
+ /** Builds all the capsules the StorageProofTest contract expects during private execution. */
118
+ export async function buildStorageProofCapsules(contractAddress: AztecAddress): Promise<Capsule[]> {
119
+ const fixture: StorageProofJSON = JSON.parse(readFileSync(FIXTURE_PATH, 'utf8'));
120
+
121
+ const root = fixture.root.map(v => BigInt(v));
122
+ const slotKey = fixture.slot_key.map(v => Number(v));
123
+ const accountNodeLength = Number(fixture.account_node_length);
124
+ const storageNodeLength = Number(fixture.storage_node_length);
125
+ const ethAddress = EthAddress.fromBuffer(Buffer.from(fixture.account.address.map(v => Number(v))));
126
+
127
+ // Compute capsule keys (must match the Noir contract's poseidon2_hash computations)
128
+ const addressCapsuleKey = await poseidon2Hash([
129
+ new Fr(ACCOUNT_CAPSULE_KEY_SEPARATOR),
130
+ ...root.map(v => new Fr(v)),
131
+ ethAddress.toField(),
132
+ ]);
133
+
134
+ const accountProofCapsuleKey = await poseidon2Hash([new Fr(ACCOUNT_PROOF_CAPSULE_KEY_SEPARATOR), addressCapsuleKey]);
135
+
136
+ const storageProofCapsuleKey = await poseidon2Hash([
137
+ new Fr(STORAGE_PROOF_CAPSULE_KEY_SEPARATOR),
138
+ addressCapsuleKey,
139
+ ...slotKey.map(v => new Fr(v)),
140
+ ]);
141
+
142
+ // Build capsule data
143
+
144
+ // 1. Account data
145
+ const accountData = serializeAccount(fixture.account);
146
+
147
+ // 2. Account proof nodes padded to MAX_ACCOUNT_PROOF_LENGTH
148
+ const accountProofData: Fr[] = [new Fr(accountNodeLength)];
149
+ for (let i = 0; i < MAX_ACCOUNT_PROOF_LENGTH; i++) {
150
+ accountProofData.push(...(i < fixture.account_nodes.length ? serializeNode(fixture.account_nodes[i]) : zeroNode()));
151
+ }
152
+
153
+ // 3. Storage proof length (u32)
154
+ const storageProofLengthData = [new Fr(storageNodeLength)];
155
+
156
+ const capsules: Capsule[] = [
157
+ new Capsule(contractAddress, addressCapsuleKey, accountData),
158
+ new Capsule(contractAddress, accountProofCapsuleKey, accountProofData),
159
+ new Capsule(contractAddress, storageProofCapsuleKey, storageProofLengthData),
160
+ ];
161
+
162
+ // 4. Individual storage node capsules for private recursion.
163
+ for (let i = 0; i < storageNodeLength; i++) {
164
+ const nodeCapsuleKey = await poseidon2Hash([
165
+ new Fr(STORAGE_PROOF_NODE_CAPSULE_KEY_SEPARATOR),
166
+ storageProofCapsuleKey,
167
+ new Fr(i),
168
+ ]);
169
+ capsules.push(new Capsule(contractAddress, nodeCapsuleKey, serializeNode(fixture.storage_nodes[i])));
170
+ }
171
+
172
+ return capsules;
173
+ }
@@ -83,9 +83,17 @@ export async function createNodes(
83
83
  return nodes;
84
84
  }
85
85
 
86
- /** Creates a P2P enabled instance of Aztec Node Service with a validator */
86
+ /** Extended config type for createNode with test-specific overrides. */
87
+ export type CreateNodeConfig = AztecNodeConfig & {
88
+ /** Whether to skip starting the sequencer. */
89
+ dontStartSequencer?: boolean;
90
+ /** Override the private key (instead of deriving from addressIndex). */
91
+ validatorPrivateKey?: `0x${string}`;
92
+ };
93
+
94
+ /** Creates a P2P enabled instance of Aztec Node Service with a validator. */
87
95
  export async function createNode(
88
- config: AztecNodeConfig & { dontStartSequencer?: boolean },
96
+ config: CreateNodeConfig,
89
97
  dateProvider: DateProvider,
90
98
  tcpPort: number,
91
99
  bootstrapNode: string | undefined,
@@ -187,20 +195,21 @@ export async function createP2PConfig(
187
195
  }
188
196
 
189
197
  export async function createValidatorConfig(
190
- config: AztecNodeConfig,
198
+ config: CreateNodeConfig,
191
199
  bootstrapNodeEnr?: string,
192
200
  port?: number,
193
201
  addressIndex: number | number[] = 1,
194
202
  dataDirectory?: string,
195
203
  ) {
196
204
  const addressIndices = Array.isArray(addressIndex) ? addressIndex : [addressIndex];
197
- if (addressIndices.length === 0) {
205
+ if (addressIndices.length === 0 && !config.validatorPrivateKey) {
198
206
  throw new Error('At least one address index must be provided to create a validator config');
199
207
  }
200
208
 
201
- const attesterPrivateKeys = addressIndices.map(index =>
202
- bufferToHex(getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + index)!),
203
- );
209
+ // Use override private key if provided, otherwise derive from address indices
210
+ const attesterPrivateKeys = config.validatorPrivateKey
211
+ ? [config.validatorPrivateKey]
212
+ : addressIndices.map(index => bufferToHex(getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + index)!));
204
213
  const p2pConfig = await createP2PConfig(config, bootstrapNodeEnr, port, dataDirectory);
205
214
  const nodeConfig: AztecNodeConfig = {
206
215
  ...config,
@@ -11,7 +11,7 @@ import type { DeployAztecL1ContractsReturnType } from '@aztec/ethereum/deploy-az
11
11
  import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract';
12
12
  import type { ExtendedViemWalletClient } from '@aztec/ethereum/types';
13
13
  import { extractEvent } from '@aztec/ethereum/utils';
14
- import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
14
+ import { EpochNumber } from '@aztec/foundation/branded-types';
15
15
  import { sha256ToField } from '@aztec/foundation/crypto/sha256';
16
16
  import { InboxAbi, UniswapPortalAbi, UniswapPortalBytecode } from '@aztec/l1-artifacts';
17
17
  import { UniswapContract } from '@aztec/noir-contracts.js/Uniswap';
@@ -250,8 +250,8 @@ export const uniswapL1L2TestSuite = (
250
250
  await wethCrossChainHarness.expectPublicBalanceOnL2(uniswapL2Contract.address, 0n);
251
251
 
252
252
  // Since the outbox is only consumable when the epoch is proven, we need to advance to the next epoch.
253
- const checkpointNumber = CheckpointNumber.fromBlockNumber(l2UniswapInteractionReceipt.blockNumber!);
254
- const epoch = await rollup.getEpochNumberForCheckpoint(checkpointNumber);
253
+ const block = await aztecNode.getBlock(l2UniswapInteractionReceipt.blockNumber!);
254
+ const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
255
255
  await cheatCodes.rollup.advanceToEpoch(EpochNumber(epoch + 1));
256
256
  await waitForProven(aztecNode, l2UniswapInteractionReceipt, { provenTimeout: 300 });
257
257
 
@@ -838,9 +838,8 @@ export const uniswapL1L2TestSuite = (
838
838
  chainId: new Fr(l1Client.chain.id),
839
839
  });
840
840
 
841
- const epoch = await rollup.getEpochNumberForCheckpoint(
842
- CheckpointNumber.fromBlockNumber(withdrawReceipt.blockNumber!),
843
- );
841
+ const block = await aztecNode.getBlock(withdrawReceipt.blockNumber!);
842
+ const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
844
843
  const swapResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, swapPrivateLeaf);
845
844
  const withdrawResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, withdrawLeaf);
846
845
 
@@ -972,9 +971,8 @@ export const uniswapL1L2TestSuite = (
972
971
  chainId: new Fr(l1Client.chain.id),
973
972
  });
974
973
 
975
- const epoch = await rollup.getEpochNumberForCheckpoint(
976
- CheckpointNumber.fromBlockNumber(withdrawReceipt.blockNumber!),
977
- );
974
+ const block = await aztecNode.getBlock(withdrawReceipt.blockNumber!);
975
+ const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
978
976
  const swapResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, swapPublicLeaf);
979
977
  const withdrawResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, withdrawLeaf);
980
978