@aztec/simulator 0.0.1-commit.c80b6263 → 0.0.1-commit.cd76b27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36):
  1. package/README.md +4 -4
  2. package/dest/public/avm/avm_gas.js +3 -3
  3. package/dest/public/avm/fixtures/account_proof_fetcher.d.ts +2 -0
  4. package/dest/public/avm/fixtures/account_proof_fetcher.d.ts.map +1 -0
  5. package/dest/public/avm/fixtures/account_proof_fetcher.js +152 -0
  6. package/dest/public/avm/opcodes/accrued_substate.d.ts +2 -2
  7. package/dest/public/avm/opcodes/accrued_substate.d.ts.map +1 -1
  8. package/dest/public/avm/opcodes/accrued_substate.js +3 -4
  9. package/dest/public/avm/serialization/bytecode_serialization.js +3 -3
  10. package/dest/public/avm/serialization/instruction_serialization.d.ts +2 -2
  11. package/dest/public/avm/serialization/instruction_serialization.d.ts.map +1 -1
  12. package/dest/public/avm/serialization/instruction_serialization.js +1 -1
  13. package/dest/public/fixtures/amm_test.js +2 -2
  14. package/dest/public/fixtures/opcode_spammer.d.ts +1 -1
  15. package/dest/public/fixtures/opcode_spammer.d.ts.map +1 -1
  16. package/dest/public/fixtures/opcode_spammer.js +6 -6
  17. package/dest/public/fixtures/utils.js +4 -4
  18. package/dest/public/hinting_db_sources.d.ts +2 -2
  19. package/dest/public/hinting_db_sources.d.ts.map +1 -1
  20. package/dest/public/hinting_db_sources.js +1 -1
  21. package/dest/public/public_processor/guarded_merkle_tree.d.ts +2 -2
  22. package/dest/public/public_processor/guarded_merkle_tree.d.ts.map +1 -1
  23. package/dest/public/public_processor/guarded_merkle_tree.js +1 -1
  24. package/package.json +16 -16
  25. package/src/public/avm/avm_gas.ts +2 -2
  26. package/src/public/avm/fixtures/account_proof.json +553 -0
  27. package/src/public/avm/fixtures/account_proof_fetcher.ts +166 -0
  28. package/src/public/avm/opcodes/accrued_substate.ts +3 -4
  29. package/src/public/avm/serialization/bytecode_serialization.ts +2 -2
  30. package/src/public/avm/serialization/instruction_serialization.ts +1 -1
  31. package/src/public/fixtures/amm_test.ts +2 -2
  32. package/src/public/fixtures/opcode_spammer.ts +6 -10
  33. package/src/public/fixtures/utils.ts +4 -4
  34. package/src/public/fuzzing/avm_fuzzer_simulator.ts +1 -1
  35. package/src/public/hinting_db_sources.ts +1 -1
  36. package/src/public/public_processor/guarded_merkle_tree.ts +1 -1
@@ -0,0 +1,166 @@
1
+ /**
2
+ * Fetches an account proof from the Ethereum mainnet and saves it as account_proof.json.
3
+ * This script is not using any Aztec library code, so it's easily portable.
4
+ */
5
+ import fs from 'fs';
6
+ import { dirname, join } from 'path';
7
+ import { fileURLToPath } from 'url';
8
+ import { createPublicClient, fromRlp, hexToBytes, http } from 'viem';
9
+ import { mainnet } from 'viem/chains';
10
+
11
// Directory containing this script (ESM modules have no __dirname builtin).
const __dirname = dirname(fileURLToPath(import.meta.url));

// Ethereum JSON-RPC endpoint; required — main() throws if unset.
const RPC_URL = process.env.RPC_URL;
// Account to fetch the proof for; defaults to a well-known mainnet address.
const ADDRESS = (process.env.ADDRESS || '0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045') as `0x${string}`;
// Optional block to pin the proof to; falls back to the latest block.
const BLOCK_TAG = process.env.BLOCK_NUMBER ? BigInt(process.env.BLOCK_NUMBER) : 'latest';
// Maximum number of non-leaf nodes in the account proof path — presumably
// mirrors a fixed array size on the noir side; TODO confirm against types.nr.
const MAX_ACCOUNT_PATH = 15;
17
+
18
+ function padTo(arr: number[], len: number) {
19
+ return [...arr, ...Array(len - arr.length).fill(0)].slice(0, len);
20
+ }
21
+
22
+ function toBytes(hex: `0x${string}`) {
23
+ return Array.from(hexToBytes(hex));
24
+ }
25
+
26
+ function bytesToU64s(bytes: number[]) {
27
+ const paddedBytes = padTo(bytes, 32);
28
+ return Array.from({ length: 4 }, (_, i) => {
29
+ let val = 0n;
30
+ for (let j = 0; j < 8; j++) {
31
+ val += BigInt(paddedBytes[i * 8 + j]) << BigInt(j * 8);
32
+ }
33
+ return val.toString();
34
+ });
35
+ }
36
+
37
+ function toBytesAndLen(val: bigint | number) {
38
+ if (val === 0n || val === 0) {
39
+ return { bytes: [0], length: 0 };
40
+ }
41
+ let hex = val.toString(16);
42
+ if (hex.length % 2) {
43
+ hex = '0' + hex;
44
+ }
45
+ const bytes = toBytes(`0x${hex}`);
46
+ return { bytes, length: bytes.length };
47
+ }
48
+
49
/**
 * Parses one RLP-encoded Merkle-Patricia trie node into the fixed 16x32-byte
 * row layout consumed downstream (see nodeToLibFormat / parseProof).
 *
 * - 17 RLP items -> branch node (node_type 0): each non-empty child hash is
 *   right-padded to 32 bytes and stored in its own row.
 * - 2 RLP items  -> extension node (node_type 1); leaf nodes (hex-prefix
 *   nibble >= 2) are rejected with an error.
 * - any other arity falls through and yields an all-empty node.
 */
function parseNode(rlp: `0x${string}`) {
  // Should be safe when working with branches and extensions without embedded children.
  const decoded = fromRlp(rlp) as `0x${string}`[];
  // Start from an all-zero, all-absent node and fill in what exists.
  const node = {
    rows: Array(16)
      .fill(0)
      .map(() => Array(32).fill(0)),
    row_exist: Array(16).fill(false),
    node_type: 0,
  };

  if (decoded.length === 17) {
    // Branch: 16 children + value slot (the value, decoded[16], is ignored).
    for (let i = 0; i < 16; i++) {
      if (decoded[i] !== '0x') {
        node.row_exist[i] = true;
        node.rows[i] = padTo(toBytes(decoded[i]), 32);
      }
    }
  } else if (decoded.length === 2) {
    // Extension or leaf: decoded[0] is the hex-prefix-encoded key fragment.
    const keyBytes = toBytes(decoded[0]);
    const prefix = keyBytes[0];
    // High nibble >= 2 marks a leaf in hex-prefix encoding; unsupported here.
    if (prefix >> 4 >= 2) {
      throw new Error('Unsupported: leaf node in proof path');
    }
    node.node_type = 1;
    // Extension header format expected by the noir code: check out storage_proof types.nr.
    node.rows[0][0] = prefix >> 4;
    node.rows[0][8] = prefix & 0x0f;
    node.rows[0][16] = keyBytes.length - 1;

    // Row 1: remaining key bytes (first 31 at most — row width is 32 and we
    // start writing at index 0 for keyBytes[1]).
    for (let i = 1; i < keyBytes.length && i < 32; i++) {
      node.rows[1][i - 1] = keyBytes[i];
    }
    // Row 2: the child hash this extension points at.
    node.rows[2] = padTo(toBytes(decoded[1]), 32);
    node.row_exist[0] = node.row_exist[1] = node.row_exist[2] = true;
  }
  return node;
}
87
+
88
+ function parseProof(proof: `0x${string}`[], maxLen: number) {
89
+ const nodes = proof.slice(0, -1).slice(0, maxLen).map(parseNode);
90
+ while (nodes.length < maxLen) {
91
+ nodes.push({
92
+ rows: Array(16)
93
+ .fill(0)
94
+ .map(() => Array(32).fill(0)),
95
+ row_exist: Array(16).fill(false),
96
+ node_type: 0,
97
+ });
98
+ }
99
+ return nodes;
100
+ }
101
+
102
+ function nodeToLibFormat(node: { rows: number[][]; row_exist: boolean[]; node_type: number }) {
103
+ return {
104
+ rows: node.rows.map(bytesToU64s),
105
+ row_exist: node.row_exist,
106
+ node_type: String(node.node_type),
107
+ };
108
+ }
109
+
110
+ async function main() {
111
+ if (!RPC_URL) {
112
+ throw new Error('RPC_URL is not set');
113
+ }
114
+ console.log(`Fetching account proof for ${ADDRESS}`);
115
+
116
+ const client = createPublicClient({
117
+ chain: mainnet,
118
+ transport: http(RPC_URL),
119
+ });
120
+
121
+ const [blockNumber, proof, block] = await Promise.all([
122
+ client.getBlockNumber(),
123
+ client.getProof({
124
+ address: ADDRESS,
125
+ storageKeys: [],
126
+ blockNumber: BLOCK_TAG === 'latest' ? undefined : BLOCK_TAG,
127
+ }),
128
+ client.getBlock({
129
+ blockNumber: BLOCK_TAG === 'latest' ? undefined : BLOCK_TAG,
130
+ }),
131
+ ]);
132
+
133
+ console.log(`Block: ${blockNumber}, Account nodes: ${proof.accountProof.length}`);
134
+
135
+ // The -1 is because the last node in the proof is the leaf, which is excluded from path verification.
136
+ const accountPathLen = proof.accountProof.length - 1;
137
+ if (accountPathLen > MAX_ACCOUNT_PATH) {
138
+ throw new Error(
139
+ `Account proof path length ${accountPathLen} exceeds MAX_ACCOUNT_PATH ${MAX_ACCOUNT_PATH}. Increase the limit.`,
140
+ );
141
+ }
142
+
143
+ const nonce = toBytesAndLen(proof.nonce);
144
+ const balance = toBytesAndLen(proof.balance);
145
+
146
+ const data = {
147
+ block_number: String(blockNumber),
148
+ node_length: String(accountPathLen),
149
+ root: bytesToU64s(toBytes(block.stateRoot)),
150
+ nodes: parseProof(proof.accountProof, MAX_ACCOUNT_PATH).map(nodeToLibFormat),
151
+ account: {
152
+ address: toBytes(ADDRESS).map(String),
153
+ balance: padTo(balance.bytes, 32).map(String),
154
+ balance_length: String(balance.length),
155
+ code_hash: bytesToU64s(toBytes(proof.codeHash)),
156
+ nonce: padTo(nonce.bytes, 8).map(String),
157
+ nonce_length: String(nonce.length),
158
+ storage_hash: bytesToU64s(toBytes(proof.storageHash)),
159
+ },
160
+ };
161
+
162
+ fs.writeFileSync(join(__dirname, 'account_proof.json'), JSON.stringify(data, null, 2));
163
+ console.log('account_proof.json generated');
164
+ }
165
+
166
+ main().catch(console.error);
@@ -204,10 +204,9 @@ export class L1ToL2MessageExists extends Instruction {
204
204
  }
205
205
  }
206
206
 
207
- export class EmitUnencryptedLog extends Instruction {
208
- // TODO(#11124): rename unencrypted -> public
209
- static type: string = 'EMITUNENCRYPTEDLOG';
210
- static readonly opcode: Opcode = Opcode.EMITUNENCRYPTEDLOG;
207
+ export class EmitPublicLog extends Instruction {
208
+ static type: string = 'EMITPUBLICLOG';
209
+ static readonly opcode: Opcode = Opcode.EMITPUBLICLOG;
211
210
  // Informs (de)serialization. See Instruction.deserialize.
212
211
  static readonly wireFormat = [OperandType.UINT8, OperandType.UINT8, OperandType.UINT16, OperandType.UINT16];
213
212
 
@@ -19,7 +19,7 @@ import {
19
19
  EcAdd,
20
20
  EmitNoteHash,
21
21
  EmitNullifier,
22
- EmitUnencryptedLog,
22
+ EmitPublicLog,
23
23
  Eq,
24
24
  FieldDiv,
25
25
  GetContractInstance,
@@ -129,7 +129,7 @@ export const INSTRUCTION_SET = new Map<Opcode, InstructionDeserializer>([
129
129
  [L1ToL2MessageExists.opcode, Instruction.fromBuffer.bind(L1ToL2MessageExists)], // Messages
130
130
 
131
131
  // Accrued Substate
132
- [EmitUnencryptedLog.opcode, Instruction.fromBuffer.bind(EmitUnencryptedLog)],
132
+ [EmitPublicLog.opcode, Instruction.fromBuffer.bind(EmitPublicLog)],
133
133
  [SendL2ToL1Message.opcode, Instruction.fromBuffer.bind(SendL2ToL1Message)],
134
134
  [GetContractInstance.opcode, Instruction.fromBuffer.bind(GetContractInstance)],
135
135
 
@@ -70,7 +70,7 @@ export enum Opcode {
70
70
  EMITNULLIFIER,
71
71
  L1TOL2MSGEXISTS,
72
72
  GETCONTRACTINSTANCE,
73
- EMITUNENCRYPTEDLOG,
73
+ EMITPUBLICLOG,
74
74
  SENDL2TOL1MSG,
75
75
  // External calls
76
76
  CALL,
@@ -1,4 +1,4 @@
1
- import { GeneratorIndex } from '@aztec/constants';
1
+ import { DomainSeparator } from '@aztec/constants';
2
2
  import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto/poseidon';
3
3
  import { Fr } from '@aztec/foundation/curves/bn254';
4
4
  import type { Logger } from '@aztec/foundation/log';
@@ -326,6 +326,6 @@ async function removeLiquidity(
326
326
  async function computePartialNoteValidityCommitment(partialNote: { commitment: Fr }, completer: AztecAddress) {
327
327
  return await poseidon2HashWithSeparator(
328
328
  [partialNote.commitment, completer],
329
- GeneratorIndex.PARTIAL_NOTE_VALIDITY_COMMITMENT,
329
+ DomainSeparator.PARTIAL_NOTE_VALIDITY_COMMITMENT,
330
330
  );
331
331
  }
@@ -143,7 +143,7 @@
143
143
  * - `EMITNOTEHASH`: max 64 per TX
144
144
  * - `EMITNULLIFIER`: max 63 per TX (one reserved for TX nullifier)
145
145
  * - `SENDL2TOL1MSG`: max 8 per TX
146
- * - `EMITUNENCRYPTEDLOG`: limited by total log payload size
146
+ * - `EMITPUBLICLOG`: limited by total log payload size
147
147
  *
148
148
  * By having the inner contract REVERT after emitting side effects, those effects are discarded, allowing the outer contract to call it again. This enables thousands of opcode executions per TX instead of just the limit.
149
149
  *
@@ -182,7 +182,7 @@ import {
182
182
  EcAdd,
183
183
  EmitNoteHash,
184
184
  EmitNullifier,
185
- EmitUnencryptedLog,
185
+ EmitPublicLog,
186
186
  Eq,
187
187
  FieldDiv,
188
188
  GetContractInstance,
@@ -1242,17 +1242,15 @@ export const SPAM_CONFIGS: Partial<Record<Opcode, SpamConfig[]>> = {
1242
1242
  },
1243
1243
  ],
1244
1244
 
1245
- // EMITUNENCRYPTEDLOG - two configs: minimal (many small logs) and max-size (one large log)
1246
- [Opcode.EMITUNENCRYPTEDLOG]: [
1245
+ // EMITPUBLICLOG - two configs: minimal (many small logs) and max-size (one large log)
1246
+ [Opcode.EMITPUBLICLOG]: [
1247
1247
  {
1248
1248
  label: 'Many empty logs, revert, repeat',
1249
1249
  setup: [
1250
1250
  { offset: 0, value: new Uint32(0n) }, // logSize = 0 fields (minimal)
1251
1251
  { offset: 1, value: new Uint32(0n) }, // revertSize
1252
1252
  ],
1253
- targetInstructions: () => [
1254
- new EmitUnencryptedLog(/*addressing_mode=*/ 0, /*logSizeOffset=*/ 0, /*logOffset=*/ 1),
1255
- ], // logOffset doesn't matter when size is 0
1253
+ targetInstructions: () => [new EmitPublicLog(/*addressing_mode=*/ 0, /*logSizeOffset=*/ 0, /*logOffset=*/ 1)], // logOffset doesn't matter when size is 0
1256
1254
  cleanupInstructions: () => [
1257
1255
  new Revert(/*addressing_mode=*/ 0, /*retSizeOffset=*/ 1, /*returnOffset=*/ 0).as(
1258
1256
  Opcode.REVERT_8,
@@ -1276,9 +1274,7 @@ export const SPAM_CONFIGS: Partial<Record<Opcode, SpamConfig[]>> = {
1276
1274
  // value: new Field(0n),
1277
1275
  //})),
1278
1276
  ],
1279
- targetInstructions: () => [
1280
- new EmitUnencryptedLog(/*addressing_mode=*/ 0, /*logSizeOffset=*/ 0, /*logOffset=*/ 2),
1281
- ], // uses logOffset 2 (uninitialized Field(0))
1277
+ targetInstructions: () => [new EmitPublicLog(/*addressing_mode=*/ 0, /*logSizeOffset=*/ 0, /*logOffset=*/ 2)], // uses logOffset 2 (uninitialized Field(0))
1282
1278
  cleanupInstructions: () => [
1283
1279
  new Revert(/*addressing_mode=*/ 0, /*retSizeOffset=*/ 1, /*returnOffset=*/ 0).as(
1284
1280
  Opcode.REVERT_8,
@@ -134,13 +134,13 @@ export async function createTxForPublicCalls(
134
134
  const txContext = new TxContext(Fr.zero(), Fr.zero(), gasSettings);
135
135
  const header = BlockHeader.empty({ globalVariables: globals });
136
136
  const constantData = new TxConstantData(header, txContext, Fr.zero(), Fr.zero());
137
- const includeByTimestamp = 0n; // Not used in the simulator.
137
+ const expirationTimestamp = 0n; // Not used in the simulator.
138
138
 
139
139
  const txData = new PrivateKernelTailCircuitPublicInputs(
140
140
  constantData,
141
141
  /*gasUsed=*/ gasUsedByPrivate,
142
142
  feePayer,
143
- includeByTimestamp,
143
+ expirationTimestamp,
144
144
  forPublic,
145
145
  );
146
146
 
@@ -171,13 +171,13 @@ export async function createTxForPrivateOnly(
171
171
  const gasSettings = new GasSettings(gasLimits, Gas.empty(), maxFeesPerGas, GasFees.empty());
172
172
  const txContext = new TxContext(Fr.zero(), Fr.zero(), gasSettings);
173
173
  const constantData = new TxConstantData(BlockHeader.empty(), txContext, Fr.zero(), Fr.zero());
174
- const includeByTimestamp = 0n; // Not used in the simulator.
174
+ const expirationTimestamp = 0n; // Not used in the simulator.
175
175
 
176
176
  const txData = new PrivateKernelTailCircuitPublicInputs(
177
177
  constantData,
178
178
  /*gasUsed=*/ gasUsedByPrivate,
179
179
  feePayer,
180
- includeByTimestamp,
180
+ expirationTimestamp,
181
181
  /*forPublic=*/ undefined,
182
182
  forRollup,
183
183
  );
@@ -146,7 +146,7 @@ async function createTxFromHint(cppTx: AvmTxHint): Promise<Tx> {
146
146
  constants,
147
147
  cppTx.gasUsedByPrivate,
148
148
  cppTx.feePayer,
149
- 0n, // includeByTimestamp
149
+ 0n, // expirationTimestamp
150
150
  forPublic,
151
151
  undefined, // forRollup - not needed for public simulation
152
152
  );
@@ -572,7 +572,7 @@ export class HintingMerkleWriteOperations implements MerkleTreeWriteOperations {
572
572
  return await this.db.close();
573
573
  }
574
574
 
575
- async [Symbol.dispose](): Promise<void> {
575
+ async [Symbol.asyncDispose](): Promise<void> {
576
576
  await this.close();
577
577
  }
578
578
 
@@ -82,7 +82,7 @@ export class GuardedMerkleTreeOperations implements MerkleTreeWriteOperations {
82
82
  return this.guardAndPush(() => this.target.close());
83
83
  }
84
84
 
85
- async [Symbol.dispose](): Promise<void> {
85
+ async [Symbol.asyncDispose](): Promise<void> {
86
86
  await this.close();
87
87
  }
88
88
  getTreeInfo(treeId: MerkleTreeId): Promise<TreeInfo> {