@aztec/aztec 0.0.1-commit.d431d1c → 0.0.1-commit.dbf9cec

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/dest/bin/index.js +5 -1
  2. package/dest/cli/admin_api_key_store.d.ts +45 -0
  3. package/dest/cli/admin_api_key_store.d.ts.map +1 -0
  4. package/dest/cli/admin_api_key_store.js +98 -0
  5. package/dest/cli/aztec_start_action.d.ts +1 -1
  6. package/dest/cli/aztec_start_action.d.ts.map +1 -1
  7. package/dest/cli/aztec_start_action.js +46 -10
  8. package/dest/cli/aztec_start_options.d.ts +1 -1
  9. package/dest/cli/aztec_start_options.d.ts.map +1 -1
  10. package/dest/cli/aztec_start_options.js +27 -8
  11. package/dest/cli/cli.d.ts +1 -1
  12. package/dest/cli/cli.d.ts.map +1 -1
  13. package/dest/cli/cli.js +0 -1
  14. package/dest/cli/cmds/compile.d.ts +4 -0
  15. package/dest/cli/cmds/compile.d.ts.map +1 -0
  16. package/dest/cli/cmds/compile.js +160 -0
  17. package/dest/cli/cmds/profile.d.ts +4 -0
  18. package/dest/cli/cmds/profile.d.ts.map +1 -0
  19. package/dest/cli/cmds/profile.js +8 -0
  20. package/dest/cli/cmds/profile_flamegraph.d.ts +4 -0
  21. package/dest/cli/cmds/profile_flamegraph.d.ts.map +1 -0
  22. package/dest/cli/cmds/profile_flamegraph.js +51 -0
  23. package/dest/cli/cmds/profile_gates.d.ts +4 -0
  24. package/dest/cli/cmds/profile_gates.d.ts.map +1 -0
  25. package/dest/cli/cmds/profile_gates.js +57 -0
  26. package/dest/cli/cmds/profile_utils.d.ts +18 -0
  27. package/dest/cli/cmds/profile_utils.d.ts.map +1 -0
  28. package/dest/cli/cmds/profile_utils.js +50 -0
  29. package/dest/cli/cmds/start_bot.d.ts +3 -3
  30. package/dest/cli/cmds/start_bot.d.ts.map +1 -1
  31. package/dest/cli/cmds/start_bot.js +9 -5
  32. package/dest/cli/cmds/start_node.d.ts +1 -1
  33. package/dest/cli/cmds/start_node.d.ts.map +1 -1
  34. package/dest/cli/cmds/start_node.js +62 -10
  35. package/dest/cli/cmds/start_p2p_bootstrap.d.ts +2 -2
  36. package/dest/cli/cmds/start_p2p_bootstrap.d.ts.map +1 -1
  37. package/dest/cli/cmds/start_p2p_bootstrap.js +1 -2
  38. package/dest/cli/cmds/start_prover_agent.js +2 -2
  39. package/dest/cli/cmds/start_prover_broker.d.ts +1 -1
  40. package/dest/cli/cmds/start_prover_broker.d.ts.map +1 -1
  41. package/dest/cli/cmds/start_prover_broker.js +2 -2
  42. package/dest/cli/cmds/utils/artifacts.d.ts +21 -0
  43. package/dest/cli/cmds/utils/artifacts.d.ts.map +1 -0
  44. package/dest/cli/cmds/utils/artifacts.js +24 -0
  45. package/dest/cli/cmds/utils/spawn.d.ts +3 -0
  46. package/dest/cli/cmds/utils/spawn.d.ts.map +1 -0
  47. package/dest/cli/cmds/utils/spawn.js +16 -0
  48. package/dest/cli/util.d.ts +5 -14
  49. package/dest/cli/util.d.ts.map +1 -1
  50. package/dest/cli/util.js +10 -5
  51. package/dest/examples/token.js +5 -5
  52. package/dest/local-network/banana_fpc.d.ts +1 -1
  53. package/dest/local-network/banana_fpc.d.ts.map +1 -1
  54. package/dest/local-network/banana_fpc.js +2 -2
  55. package/dest/local-network/local-network.d.ts +4 -3
  56. package/dest/local-network/local-network.d.ts.map +1 -1
  57. package/dest/local-network/local-network.js +34 -16
  58. package/dest/testing/anvil_test_watcher.d.ts +9 -1
  59. package/dest/testing/anvil_test_watcher.d.ts.map +1 -1
  60. package/dest/testing/anvil_test_watcher.js +52 -15
  61. package/dest/testing/epoch_test_settler.d.ts +4 -2
  62. package/dest/testing/epoch_test_settler.d.ts.map +1 -1
  63. package/dest/testing/epoch_test_settler.js +15 -5
  64. package/package.json +34 -34
  65. package/scripts/aztec.sh +8 -5
  66. package/scripts/init.sh +23 -13
  67. package/scripts/new.sh +17 -16
  68. package/scripts/setup_workspace.sh +124 -0
  69. package/src/bin/index.ts +5 -1
  70. package/src/cli/admin_api_key_store.ts +128 -0
  71. package/src/cli/aztec_start_action.ts +50 -6
  72. package/src/cli/aztec_start_options.ts +28 -6
  73. package/src/cli/cli.ts +0 -1
  74. package/src/cli/cmds/compile.ts +184 -0
  75. package/src/cli/cmds/profile.ts +25 -0
  76. package/src/cli/cmds/profile_flamegraph.ts +63 -0
  77. package/src/cli/cmds/profile_gates.ts +67 -0
  78. package/src/cli/cmds/profile_utils.ts +58 -0
  79. package/src/cli/cmds/start_bot.ts +8 -5
  80. package/src/cli/cmds/start_node.ts +51 -9
  81. package/src/cli/cmds/start_p2p_bootstrap.ts +2 -2
  82. package/src/cli/cmds/start_prover_agent.ts +2 -2
  83. package/src/cli/cmds/start_prover_broker.ts +5 -1
  84. package/src/cli/cmds/utils/artifacts.ts +44 -0
  85. package/src/cli/cmds/utils/spawn.ts +16 -0
  86. package/src/cli/util.ts +14 -19
  87. package/src/examples/token.ts +5 -7
  88. package/src/local-network/banana_fpc.ts +10 -6
  89. package/src/local-network/local-network.ts +49 -20
  90. package/src/testing/anvil_test_watcher.ts +59 -15
  91. package/src/testing/epoch_test_settler.ts +16 -4
  92. package/dest/cli/cmds/start_prover_node.d.ts +0 -7
  93. package/dest/cli/cmds/start_prover_node.d.ts.map +0 -1
  94. package/dest/cli/cmds/start_prover_node.js +0 -108
  95. package/scripts/compile.sh +0 -44
  96. package/scripts/extract_function.js +0 -47
  97. package/scripts/flamegraph.sh +0 -59
  98. package/scripts/setup_project.sh +0 -31
  99. package/src/cli/cmds/start_prover_node.ts +0 -124
package/src/cli/util.ts CHANGED
@@ -2,13 +2,13 @@ import type { AztecNodeConfig } from '@aztec/aztec-node';
2
2
  import type { AccountManager } from '@aztec/aztec.js/wallet';
3
3
  import type { ViemClient } from '@aztec/ethereum/types';
4
4
  import type { ConfigMappingsType } from '@aztec/foundation/config';
5
- import { Fr } from '@aztec/foundation/curves/bn254';
6
5
  import { EthAddress } from '@aztec/foundation/eth-address';
6
+ import { jsonStringify } from '@aztec/foundation/json-rpc';
7
7
  import { type LogFn, createLogger } from '@aztec/foundation/log';
8
8
  import type { SharedNodeConfig } from '@aztec/node-lib/config';
9
9
  import type { ProverConfig } from '@aztec/stdlib/interfaces/server';
10
10
  import { getTelemetryClient } from '@aztec/telemetry-client/start';
11
- import type { TestWallet } from '@aztec/test-wallet/server';
11
+ import type { EmbeddedWallet } from '@aztec/wallets/embedded';
12
12
 
13
13
  import chalk from 'chalk';
14
14
  import type { Command } from 'commander';
@@ -68,30 +68,19 @@ export const installSignalHandlers = (logFn: LogFn, cb?: Array<() => Promise<voi
68
68
  /**
69
69
  * Creates logs for the initial accounts
70
70
  * @param accounts - The initial accounts
71
- * @param wallet - A TestWallet instance to get the registered accounts
71
+ * @param wallet - A EmbeddedWallet instance to get the registered accounts
72
72
  * @returns A string array containing the initial accounts details
73
73
  */
74
- export async function createAccountLogs(
75
- accountsWithSecretKeys: {
76
- /**
77
- * The account object
78
- */
79
- account: AccountManager;
80
- /**
81
- * The secret key of the account
82
- */
83
- secretKey: Fr;
84
- }[],
85
- wallet: TestWallet,
86
- ) {
74
+ export async function createAccountLogs(accountManagers: AccountManager[], wallet: EmbeddedWallet) {
87
75
  const registeredAccounts = await wallet.getAccounts();
88
76
  const accountLogStrings = [`Initial Accounts:\n\n`];
89
- for (const accountWithSecretKey of accountsWithSecretKeys) {
90
- const completeAddress = await accountWithSecretKey.account.getCompleteAddress();
77
+ for (const accountManager of accountManagers) {
78
+ const account = await accountManager.getAccount();
79
+ const completeAddress = account.getCompleteAddress();
91
80
  if (registeredAccounts.find(a => a.item.equals(completeAddress.address))) {
92
81
  accountLogStrings.push(` Address: ${completeAddress.address.toString()}\n`);
93
82
  accountLogStrings.push(` Partial Address: ${completeAddress.partialAddress.toString()}\n`);
94
- accountLogStrings.push(` Secret Key: ${accountWithSecretKey.secretKey.toString()}\n`);
83
+ accountLogStrings.push(` Secret Key: ${account.getSecretKey().toString()}\n`);
95
84
  accountLogStrings.push(
96
85
  ` Master nullifier public key: ${completeAddress.publicKeys.masterNullifierPublicKey.toString()}\n`,
97
86
  );
@@ -388,3 +377,9 @@ export async function setupUpdateMonitor(
388
377
 
389
378
  checker.start();
390
379
  }
380
+
381
+ export function stringifyConfig(config: object): string {
382
+ return Object.entries(config)
383
+ .map(([key, value]) => `${key}=${jsonStringify(value)}`)
384
+ .join(' ');
385
+ }
@@ -2,7 +2,7 @@ import { getInitialTestAccountsData } from '@aztec/accounts/testing';
2
2
  import { createAztecNodeClient } from '@aztec/aztec.js/node';
3
3
  import { createLogger } from '@aztec/foundation/log';
4
4
  import { TokenContract } from '@aztec/noir-contracts.js/Token';
5
- import { TestWallet } from '@aztec/test-wallet/server';
5
+ import { EmbeddedWallet } from '@aztec/wallets/embedded';
6
6
 
7
7
  const logger = createLogger('example:token');
8
8
 
@@ -19,7 +19,7 @@ const TRANSFER_AMOUNT = 33n;
19
19
  async function main() {
20
20
  logger.info('Running token contract test on HTTP interface.');
21
21
 
22
- const wallet = await TestWallet.create(node);
22
+ const wallet = await EmbeddedWallet.create(node);
23
23
 
24
24
  // During local network setup we deploy a few accounts. Below we add them to our wallet.
25
25
  const [aliceInitialAccountData, bobInitialAccountData] = await getInitialTestAccountsData();
@@ -32,14 +32,12 @@ async function main() {
32
32
  logger.info(`Fetched Alice and Bob accounts: ${alice.toString()}, ${bob.toString()}`);
33
33
 
34
34
  logger.info('Deploying Token...');
35
- const token = await TokenContract.deploy(wallet, alice, 'TokenName', 'TokenSymbol', 18)
36
- .send({ from: alice })
37
- .deployed();
35
+ const token = await TokenContract.deploy(wallet, alice, 'TokenName', 'TokenSymbol', 18).send({ from: alice });
38
36
  logger.info('Token deployed');
39
37
 
40
38
  // Mint tokens to Alice
41
39
  logger.info(`Minting ${ALICE_MINT_BALANCE} more coins to Alice...`);
42
- await token.methods.mint_to_private(alice, ALICE_MINT_BALANCE).send({ from: alice }).wait();
40
+ await token.methods.mint_to_private(alice, ALICE_MINT_BALANCE).send({ from: alice });
43
41
 
44
42
  logger.info(`${ALICE_MINT_BALANCE} tokens were successfully minted by Alice and transferred to private`);
45
43
 
@@ -48,7 +46,7 @@ async function main() {
48
46
 
49
47
  // We will now transfer tokens from Alice to Bob
50
48
  logger.info(`Transferring ${TRANSFER_AMOUNT} tokens from Alice to Bob...`);
51
- await token.methods.transfer(bob, TRANSFER_AMOUNT).send({ from: alice }).wait();
49
+ await token.methods.transfer(bob, TRANSFER_AMOUNT).send({ from: alice });
52
50
 
53
51
  // Check the new balances
54
52
  const aliceBalance = await token.methods.balance_of_private(alice).simulate({ from: alice });
@@ -49,12 +49,16 @@ export async function setupBananaFPC(initialAccounts: InitialAccountData[], wall
49
49
  const bananaCoinAddress = await getBananaCoinAddress(initialAccounts);
50
50
  const admin = getBananaAdmin(initialAccounts);
51
51
  const [bananaCoin, fpc] = await Promise.all([
52
- TokenContract.deploy(wallet, admin, bananaCoinArgs.name, bananaCoinArgs.symbol, bananaCoinArgs.decimal)
53
- .send({ from: admin, contractAddressSalt: BANANA_COIN_SALT, universalDeploy: true })
54
- .deployed(),
55
- FPCContract.deploy(wallet, bananaCoinAddress, admin)
56
- .send({ from: admin, contractAddressSalt: BANANA_FPC_SALT, universalDeploy: true })
57
- .deployed(),
52
+ TokenContract.deploy(wallet, admin, bananaCoinArgs.name, bananaCoinArgs.symbol, bananaCoinArgs.decimal).send({
53
+ from: admin,
54
+ contractAddressSalt: BANANA_COIN_SALT,
55
+ universalDeploy: true,
56
+ }),
57
+ FPCContract.deploy(wallet, bananaCoinAddress, admin).send({
58
+ from: admin,
59
+ contractAddressSalt: BANANA_FPC_SALT,
60
+ universalDeploy: true,
61
+ }),
58
62
  ]);
59
63
 
60
64
  log(`BananaCoin: ${bananaCoin.address}`);
@@ -18,13 +18,16 @@ import type { LogFn } from '@aztec/foundation/log';
18
18
  import { DateProvider, TestDateProvider } from '@aztec/foundation/timer';
19
19
  import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
20
20
  import { protocolContractsHash } from '@aztec/protocol-contracts';
21
+ import { SequencerState } from '@aztec/sequencer-client';
22
+ import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
21
23
  import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
22
24
  import {
23
25
  type TelemetryClient,
24
26
  getConfigEnvVars as getTelemetryClientConfig,
25
27
  initTelemetryClient,
26
28
  } from '@aztec/telemetry-client';
27
- import { TestWallet, deployFundedSchnorrAccounts } from '@aztec/test-wallet/server';
29
+ import { EmbeddedWallet } from '@aztec/wallets/embedded';
30
+ import { deployFundedSchnorrAccounts } from '@aztec/wallets/testing';
28
31
  import { getGenesisValues } from '@aztec/world-state/testing';
29
32
 
30
33
  import { type Hex, createPublicClient, fallback, http as httpViemTransport } from 'viem';
@@ -51,7 +54,6 @@ export async function deployContractsToL1(
51
54
  aztecNodeConfig: AztecNodeConfig,
52
55
  privateKey: Hex,
53
56
  opts: {
54
- assumeProvenThroughBlockNumber?: number;
55
57
  genesisArchiveRoot?: Fr;
56
58
  feeJuicePortalInitialBalance?: bigint;
57
59
  } = {},
@@ -105,12 +107,14 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
105
107
  };
106
108
  const hdAccount = mnemonicToAccount(config.l1Mnemonic || DefaultMnemonic);
107
109
  if (
108
- aztecNodeConfig.publisherPrivateKeys == undefined ||
109
- !aztecNodeConfig.publisherPrivateKeys.length ||
110
- aztecNodeConfig.publisherPrivateKeys[0].getValue() === NULL_KEY
110
+ aztecNodeConfig.sequencerPublisherPrivateKeys == undefined ||
111
+ !aztecNodeConfig.sequencerPublisherPrivateKeys.length ||
112
+ aztecNodeConfig.sequencerPublisherPrivateKeys[0].getValue() === NULL_KEY
111
113
  ) {
112
114
  const privKey = hdAccount.getHdKey().privateKey;
113
- aztecNodeConfig.publisherPrivateKeys = [new SecretValue(`0x${Buffer.from(privKey!).toString('hex')}` as const)];
115
+ aztecNodeConfig.sequencerPublisherPrivateKeys = [
116
+ new SecretValue(`0x${Buffer.from(privKey!).toString('hex')}` as const),
117
+ ];
114
118
  }
115
119
  if (!aztecNodeConfig.validatorPrivateKeys?.getValue().length) {
116
120
  const privKey = hdAccount.getHdKey().privateKey;
@@ -149,7 +153,6 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
149
153
  aztecNodeConfig,
150
154
  aztecNodeConfig.validatorPrivateKeys.getValue()[0],
151
155
  {
152
- assumeProvenThroughBlockNumber: Number.MAX_SAFE_INTEGER,
153
156
  genesisArchiveRoot,
154
157
  feeJuicePortalInitialBalance: fundingNeeded,
155
158
  },
@@ -179,25 +182,42 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
179
182
  const blobClient = createBlobClient();
180
183
  const node = await createAztecNode(aztecNodeConfig, { telemetry, blobClient, dateProvider }, { prefilledPublicData });
181
184
 
185
+ // Now that the node is up, let the watcher check for pending txs so it can skip unfilled slots faster when
186
+ // transactions are waiting in the mempool. Also let it check if the sequencer is actively building, to avoid
187
+ // warping time out from under an in-progress block.
188
+ watcher?.setGetPendingTxCount(() => node.getPendingTxCount());
189
+ const sequencer = node.getSequencer()?.getSequencer();
190
+ if (sequencer) {
191
+ const idleStates: Set<string> = new Set([
192
+ SequencerState.STOPPED,
193
+ SequencerState.STOPPING,
194
+ SequencerState.IDLE,
195
+ SequencerState.SYNCHRONIZING,
196
+ ]);
197
+ watcher?.setIsSequencerBuilding(() => !idleStates.has(sequencer.getState()));
198
+ }
199
+
182
200
  let epochTestSettler: EpochTestSettler | undefined;
183
201
  if (!aztecNodeConfig.p2pEnabled) {
184
- epochTestSettler = new EpochTestSettler(cheatcodes!, rollupAddress!, node.getBlockSource(), {
185
- pollingIntervalMs: 200,
186
- });
202
+ epochTestSettler = new EpochTestSettler(
203
+ cheatcodes!,
204
+ rollupAddress!,
205
+ node.getBlockSource(),
206
+ logger.createChild('epoch-settler'),
207
+ { pollingIntervalMs: 200 },
208
+ );
187
209
  await epochTestSettler.start();
188
210
  }
189
211
 
190
212
  if (initialAccounts.length) {
191
- const PXEConfig = { proverEnabled: aztecNodeConfig.realProofs };
192
- const wallet = await TestWallet.create(node, PXEConfig);
213
+ const wallet = await EmbeddedWallet.create(node, {
214
+ pxeConfig: { proverEnabled: aztecNodeConfig.realProofs },
215
+ ephemeral: true,
216
+ });
193
217
 
194
218
  userLog('Setting up funded test accounts...');
195
- const accountManagers = await deployFundedSchnorrAccounts(wallet, node, initialAccounts);
196
- const accountsWithSecrets = accountManagers.map((manager, i) => ({
197
- account: manager,
198
- secretKey: initialAccounts[i].secret,
199
- }));
200
- const accLogs = await createAccountLogs(accountsWithSecrets, wallet);
219
+ const accountManagers = await deployFundedSchnorrAccounts(wallet, initialAccounts);
220
+ const accLogs = await createAccountLogs(accountManagers, wallet);
201
221
  userLog(accLogs.join(''));
202
222
 
203
223
  await setupBananaFPC(initialAccounts, wallet, userLog);
@@ -223,7 +243,12 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
223
243
  */
224
244
  export async function createAztecNode(
225
245
  config: Partial<AztecNodeConfig> = {},
226
- deps: { telemetry?: TelemetryClient; blobClient?: BlobClientInterface; dateProvider?: DateProvider } = {},
246
+ deps: {
247
+ telemetry?: TelemetryClient;
248
+ blobClient?: BlobClientInterface;
249
+ dateProvider?: DateProvider;
250
+ proverBroker?: ProvingJobBroker;
251
+ } = {},
227
252
  options: { prefilledPublicData?: PublicDataTreeLeaf[] } = {},
228
253
  ) {
229
254
  // TODO(#12272): will clean this up. This is criminal.
@@ -233,6 +258,10 @@ export async function createAztecNode(
233
258
  ...config,
234
259
  l1Contracts: { ...l1Contracts, ...config.l1Contracts },
235
260
  };
236
- const node = await AztecNodeService.createAndSync(aztecNodeConfig, deps, options);
261
+ const node = await AztecNodeService.createAndSync(
262
+ aztecNodeConfig,
263
+ { ...deps, proverNodeDeps: { broker: deps.proverBroker } },
264
+ options,
265
+ );
237
266
  return node;
238
267
  }
@@ -31,6 +31,15 @@ export class AnvilTestWatcher {
31
31
 
32
32
  private isMarkingAsProven = true;
33
33
 
34
+ // Optional callback to check if there are pending txs in the mempool.
35
+ private getPendingTxCount?: () => Promise<number>;
36
+
37
+ // Optional callback to check if the sequencer is actively building a block.
38
+ private isSequencerBuilding?: () => boolean;
39
+
40
+ // Tracks when we first observed the current unfilled slot with pending txs (real wall time).
41
+ private unfilledSlotFirstSeen?: { slot: number; realTime: number };
42
+
34
43
  constructor(
35
44
  private cheatcodes: EthCheatCodes,
36
45
  rollupAddress: EthAddress,
@@ -59,6 +68,16 @@ export class AnvilTestWatcher {
59
68
  this.isLocalNetwork = isLocalNetwork;
60
69
  }
61
70
 
71
+ /** Sets a callback to check for pending txs, used to skip unfilled slots faster when txs are waiting. */
72
+ setGetPendingTxCount(fn: () => Promise<number>) {
73
+ this.getPendingTxCount = fn;
74
+ }
75
+
76
+ /** Sets a callback to check if the sequencer is actively building, to avoid warping while it works. */
77
+ setIsSequencerBuilding(fn: () => boolean) {
78
+ this.isSequencerBuilding = fn;
79
+ }
80
+
62
81
  async start() {
63
82
  if (this.filledRunningPromise) {
64
83
  throw new Error('Watcher already watching for filled slot');
@@ -131,15 +150,8 @@ export class AnvilTestWatcher {
131
150
  const nextSlotTimestamp = Number(await this.rollup.read.getTimestampForSlot([BigInt(nextSlot)]));
132
151
 
133
152
  if (BigInt(currentSlot) === checkpointLog.slotNumber) {
134
- // We should jump to the next slot
135
- try {
136
- await this.cheatcodes.warp(nextSlotTimestamp, {
137
- resetBlockInterval: true,
138
- });
139
- } catch (e) {
140
- this.logger.error(`Failed to warp to timestamp ${nextSlotTimestamp}: ${e}`);
141
- }
142
-
153
+ // The current slot has been filled, we should jump to the next slot.
154
+ await this.warpToTimestamp(nextSlotTimestamp);
143
155
  this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`);
144
156
  return;
145
157
  }
@@ -149,18 +161,50 @@ export class AnvilTestWatcher {
149
161
  return;
150
162
  }
151
163
 
152
- const currentTimestamp = this.dateProvider?.now() ?? Date.now();
153
- if (currentTimestamp > nextSlotTimestamp * 1000) {
154
- try {
155
- await this.cheatcodes.warp(nextSlotTimestamp, { resetBlockInterval: true });
156
- } catch (e) {
157
- this.logger.error(`Failed to warp to timestamp ${nextSlotTimestamp}: ${e}`);
164
+ // If there are pending txs and the sequencer missed them, warp quickly (after a 2s real-time debounce) so the
165
+ // sequencer can retry in the next slot. Without this, we'd have to wait a full real-time slot duration (~36s) for
166
+ // the dateProvider to catch up to the next slot timestamp. We skip the warp if the sequencer is actively building
167
+ // to avoid invalidating its in-progress work.
168
+ if (this.getPendingTxCount) {
169
+ const pendingTxs = await this.getPendingTxCount();
170
+ if (pendingTxs > 0) {
171
+ if (this.isSequencerBuilding?.()) {
172
+ this.unfilledSlotFirstSeen = undefined;
173
+ return;
174
+ }
175
+
176
+ const realNow = Date.now();
177
+ if (!this.unfilledSlotFirstSeen || this.unfilledSlotFirstSeen.slot !== currentSlot) {
178
+ this.unfilledSlotFirstSeen = { slot: currentSlot, realTime: realNow };
179
+ return;
180
+ }
181
+
182
+ if (realNow - this.unfilledSlotFirstSeen.realTime > 2000) {
183
+ await this.warpToTimestamp(nextSlotTimestamp);
184
+ this.unfilledSlotFirstSeen = undefined;
185
+ this.logger.info(`Slot ${currentSlot} was missed with pending txs, jumped to next slot`);
186
+ }
187
+
188
+ return;
158
189
  }
190
+ }
159
191
 
192
+ // Fallback: warp when the dateProvider time has passed the next slot timestamp.
193
+ const currentTimestamp = this.dateProvider?.now() ?? Date.now();
194
+ if (currentTimestamp > nextSlotTimestamp * 1000) {
195
+ await this.warpToTimestamp(nextSlotTimestamp);
160
196
  this.logger.info(`Slot ${currentSlot} was missed, jumped to next slot`);
161
197
  }
162
198
  } catch {
163
199
  this.logger.error('mineIfSlotFilled failed');
164
200
  }
165
201
  }
202
+
203
+ private async warpToTimestamp(timestamp: number) {
204
+ try {
205
+ await this.cheatcodes.warp(timestamp, { resetBlockInterval: true });
206
+ } catch (e) {
207
+ this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`);
208
+ }
209
+ }
166
210
  }
@@ -1,6 +1,7 @@
1
1
  import { Fr } from '@aztec/aztec.js/fields';
2
2
  import { type EthCheatCodes, RollupCheatCodes } from '@aztec/ethereum/test';
3
3
  import { type EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
4
+ import type { Logger } from '@aztec/foundation/log';
4
5
  import { EpochMonitor } from '@aztec/prover-node';
5
6
  import type { EthAddress, L2BlockSource } from '@aztec/stdlib/block';
6
7
  import { computeL2ToL1MembershipWitnessFromMessagesInEpoch } from '@aztec/stdlib/messaging';
@@ -13,6 +14,7 @@ export class EpochTestSettler {
13
14
  cheatcodes: EthCheatCodes,
14
15
  rollupAddress: EthAddress,
15
16
  private l2BlockSource: L2BlockSource,
17
+ private log: Logger,
16
18
  private options: { pollingIntervalMs: number; provingDelayMs?: number },
17
19
  ) {
18
20
  this.rollupCheatCodes = new RollupCheatCodes(cheatcodes, { rollupAddress });
@@ -29,10 +31,16 @@ export class EpochTestSettler {
29
31
  }
30
32
 
31
33
  async handleEpochReadyToProve(epoch: EpochNumber): Promise<boolean> {
32
- const blocks = await this.l2BlockSource.getBlocksForEpoch(epoch);
34
+ const checkpointedBlocks = await this.l2BlockSource.getCheckpointedBlocksForEpoch(epoch);
35
+ const blocks = checkpointedBlocks.map(b => b.block);
36
+ this.log.info(
37
+ `Settling epoch ${epoch} with blocks ${blocks[0]?.header.getBlockNumber()} to ${blocks.at(-1)?.header.getBlockNumber()}`,
38
+ { blocks: blocks.map(b => b.toBlockInfo()) },
39
+ );
33
40
  const messagesInEpoch: Fr[][][][] = [];
34
41
  let previousSlotNumber = SlotNumber.ZERO;
35
42
  let checkpointIndex = -1;
43
+
36
44
  for (const block of blocks) {
37
45
  const slotNumber = block.header.globalVariables.slotNumber;
38
46
  if (slotNumber !== previousSlotNumber) {
@@ -47,11 +55,15 @@ export class EpochTestSettler {
47
55
  if (firstMessage) {
48
56
  const { root: outHash } = computeL2ToL1MembershipWitnessFromMessagesInEpoch(messagesInEpoch, firstMessage);
49
57
  await this.rollupCheatCodes.insertOutbox(epoch, outHash.toBigInt());
58
+ } else {
59
+ this.log.info(`No L2 to L1 messages in epoch ${epoch}`);
50
60
  }
51
61
 
52
- // Mark the blocks as proven.
53
- for (const block of blocks) {
54
- await this.rollupCheatCodes.markAsProven(block.number);
62
+ const lastCheckpoint = checkpointedBlocks.at(-1)?.checkpointNumber;
63
+ if (lastCheckpoint !== undefined) {
64
+ await this.rollupCheatCodes.markAsProven(lastCheckpoint);
65
+ } else {
66
+ this.log.warn(`No checkpoint found for epoch ${epoch}`);
55
67
  }
56
68
 
57
69
  return true;
@@ -1,7 +0,0 @@
1
- import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server';
2
- import type { LogFn } from '@aztec/foundation/log';
3
- import { type ProverNodeConfig } from '@aztec/prover-node';
4
- export declare function startProverNode(options: any, signalHandlers: (() => Promise<void>)[], services: NamespacedApiHandlers, userLog: LogFn): Promise<{
5
- config: ProverNodeConfig;
6
- }>;
7
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3RhcnRfcHJvdmVyX25vZGUuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9jbGkvY21kcy9zdGFydF9wcm92ZXJfbm9kZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFLQSxPQUFPLEtBQUssRUFBRSxxQkFBcUIsRUFBRSxNQUFNLG1DQUFtQyxDQUFDO0FBRS9FLE9BQU8sS0FBSyxFQUFFLEtBQUssRUFBRSxNQUFNLHVCQUF1QixDQUFDO0FBRW5ELE9BQU8sRUFDTCxLQUFLLGdCQUFnQixFQUl0QixNQUFNLG9CQUFvQixDQUFDO0FBUzVCLHdCQUFzQixlQUFlLENBQ25DLE9BQU8sRUFBRSxHQUFHLEVBQ1osY0FBYyxFQUFFLENBQUMsTUFBTSxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUMsRUFBRSxFQUN2QyxRQUFRLEVBQUUscUJBQXFCLEVBQy9CLE9BQU8sRUFBRSxLQUFLLEdBQ2IsT0FBTyxDQUFDO0lBQUUsTUFBTSxFQUFFLGdCQUFnQixDQUFBO0NBQUUsQ0FBQyxDQStGdkMifQ==
@@ -1 +0,0 @@
1
- {"version":3,"file":"start_prover_node.d.ts","sourceRoot":"","sources":["../../../src/cli/cmds/start_prover_node.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,mCAAmC,CAAC;AAE/E,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAEnD,OAAO,EACL,KAAK,gBAAgB,EAItB,MAAM,oBAAoB,CAAC;AAS5B,wBAAsB,eAAe,CACnC,OAAO,EAAE,GAAG,EACZ,cAAc,EAAE,CAAC,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,EACvC,QAAQ,EAAE,qBAAqB,EAC/B,OAAO,EAAE,KAAK,GACb,OAAO,CAAC;IAAE,MAAM,EAAE,gBAAgB,CAAA;CAAE,CAAC,CA+FvC"}
@@ -1,108 +0,0 @@
1
- import { getInitialTestAccountsData } from '@aztec/accounts/testing';
2
- import { Fr } from '@aztec/aztec.js/fields';
3
- import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils';
4
- import { getL1Config } from '@aztec/cli/config';
5
- import { getPublicClient } from '@aztec/ethereum/client';
6
- import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici';
7
- import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker';
8
- import { createProverNode, getProverNodeConfigFromEnv, proverNodeConfigMappings } from '@aztec/prover-node';
9
- import { P2PApiSchema, ProverNodeApiSchema } from '@aztec/stdlib/interfaces/server';
10
- import { initTelemetryClient, makeTracedFetch, telemetryClientConfigMappings } from '@aztec/telemetry-client';
11
- import { getGenesisValues } from '@aztec/world-state/testing';
12
- import { extractRelevantOptions, preloadCrsDataForVerifying, setupUpdateMonitor } from '../util.js';
13
- import { getVersions } from '../versioning.js';
14
- import { startProverBroker } from './start_prover_broker.js';
15
- export async function startProverNode(options, signalHandlers, services, userLog) {
16
- if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) {
17
- userLog(`Starting a prover-node with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`);
18
- process.exit(1);
19
- }
20
- let proverConfig = {
21
- ...getProverNodeConfigFromEnv(),
22
- ...extractRelevantOptions(options, proverNodeConfigMappings, 'proverNode')
23
- };
24
- if (!proverConfig.l1Contracts.registryAddress || proverConfig.l1Contracts.registryAddress.isZero()) {
25
- throw new Error('L1 registry address is required to start a Prover Node');
26
- }
27
- const followsCanonicalRollup = typeof proverConfig.rollupVersion !== 'number';
28
- const { addresses, config } = await getL1Config(proverConfig.l1Contracts.registryAddress, proverConfig.l1RpcUrls, proverConfig.l1ChainId, proverConfig.rollupVersion);
29
- process.env.ROLLUP_CONTRACT_ADDRESS ??= addresses.rollupAddress.toString();
30
- proverConfig.l1Contracts = addresses;
31
- proverConfig = {
32
- ...proverConfig,
33
- ...config
34
- };
35
- const testAccounts = proverConfig.testAccounts ? (await getInitialTestAccountsData()).map((a)=>a.address) : [];
36
- const sponsoredFPCAccounts = proverConfig.sponsoredFPC ? [
37
- await getSponsoredFPCAddress()
38
- ] : [];
39
- const initialFundedAccounts = testAccounts.concat(sponsoredFPCAccounts);
40
- userLog(`Initial funded accounts: ${initialFundedAccounts.map((a)=>a.toString()).join(', ')}`);
41
- const { genesisArchiveRoot, prefilledPublicData } = await getGenesisValues(initialFundedAccounts);
42
- userLog(`Genesis archive root: ${genesisArchiveRoot.toString()}`);
43
- if (!Fr.fromHexString(config.genesisArchiveTreeRoot).equals(genesisArchiveRoot)) {
44
- throw new Error(`The computed genesis archive tree root ${genesisArchiveRoot} does not match the expected genesis archive tree root ${config.genesisArchiveTreeRoot} for the rollup deployed at ${addresses.rollupAddress}`);
45
- }
46
- const telemetry = await initTelemetryClient(extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'));
47
- let broker;
48
- if (proverConfig.proverBrokerUrl) {
49
- // at 1TPS we'd enqueue ~1k chonk verifier proofs and ~1k AVM proofs immediately
50
- // set a lower connection limit such that we don't overload the server
51
- // Keep retrying up to 30s
52
- const fetch = makeTracedFetch([
53
- 1,
54
- 2,
55
- 3,
56
- 3,
57
- 3,
58
- 3,
59
- 3,
60
- 3,
61
- 3,
62
- 3,
63
- 3
64
- ], false, makeUndiciFetch(new Agent({
65
- connections: 100
66
- })));
67
- broker = createProvingJobBrokerClient(proverConfig.proverBrokerUrl, getVersions(proverConfig), fetch);
68
- } else if (options.proverBroker) {
69
- ({ broker } = await startProverBroker(options, signalHandlers, services, userLog));
70
- } else {
71
- userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`);
72
- process.exit(1);
73
- }
74
- if (proverConfig.proverAgentCount === 0) {
75
- userLog(`Running prover node without local prover agent. Connect one or more prover agents to this node or pass --proverAgent.proverAgentCount`);
76
- }
77
- await preloadCrsDataForVerifying(proverConfig, userLog);
78
- const proverNode = await createProverNode(proverConfig, {
79
- telemetry,
80
- broker
81
- }, {
82
- prefilledPublicData
83
- });
84
- services.proverNode = [
85
- proverNode,
86
- ProverNodeApiSchema
87
- ];
88
- if (proverNode.getP2P()) {
89
- services.p2p = [
90
- proverNode.getP2P(),
91
- P2PApiSchema
92
- ];
93
- }
94
- if (!proverConfig.proverBrokerUrl) {
95
- services.provingJobSource = [
96
- proverNode.getProver().getProvingJobSource(),
97
- ProvingJobConsumerSchema
98
- ];
99
- }
100
- signalHandlers.push(proverNode.stop.bind(proverNode));
101
- await proverNode.start();
102
- if (proverConfig.autoUpdate !== 'disabled' && proverConfig.autoUpdateUrl) {
103
- await setupUpdateMonitor(proverConfig.autoUpdate, new URL(proverConfig.autoUpdateUrl), followsCanonicalRollup, getPublicClient(proverConfig), proverConfig.l1Contracts.registryAddress, signalHandlers);
104
- }
105
- return {
106
- config: proverConfig
107
- };
108
- }
@@ -1,44 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- NARGO=${NARGO:-nargo}
5
- BB=${BB:-bb}
6
-
7
- # If help is requested, show Aztec-specific info then run nargo compile help and then exit in order to not trigger
8
- # transpilation
9
- for arg in "$@"; do
10
- if [ "$arg" == "--help" ] || [ "$arg" == "-h" ]; then
11
- cat << 'EOF'
12
- Aztec Compile - Compile Aztec Noir contracts
13
-
14
- This command compiles Aztec Noir contracts using nargo and then automatically
15
- postprocesses them to generate Aztec specific artifacts including:
16
- - Transpiled contract artifacts
17
- - Verification keys
18
-
19
- The compiled contracts will be placed in the target/ directory by default.
20
-
21
- ---
22
- Underlying nargo compile options:
23
-
24
- EOF
25
- nargo compile --help
26
- exit 0
27
- fi
28
- done
29
-
30
- # Run nargo compile.
31
- $NARGO compile "$@"
32
-
33
- echo "Postprocessing contract..."
34
- $BB aztec_process
35
-
36
- # Strip internal prefixes from all compiled contract JSONs in target directory
37
- # TODO: This should be part of bb aztec_process!
38
- for json in target/*.json; do
39
- temp_file="${json}.tmp"
40
- jq '.functions |= map(.name |= sub("^__aztec_nr_internals__"; ""))' "$json" > "$temp_file"
41
- mv "$temp_file" "$json"
42
- done
43
-
44
- echo "Compilation complete!"
@@ -1,47 +0,0 @@
1
- #!/usr/bin/env node
2
- import fs from 'fs/promises';
3
- import path from 'path';
4
-
5
- // Simple script to extract a contract function as a separate Noir artifact.
6
- // We need to use this since the transpiling that we do on public functions make the contract artifacts
7
- // unreadable by noir tooling, since they are no longer following the noir artifact format.
8
- async function main() {
9
- let [contractArtifactPath, functionName] = process.argv.slice(2);
10
- if (!contractArtifactPath || !functionName) {
11
- console.log('Usage: node extractFunctionAsNoirArtifact.js <contractArtifactPath> <functionName>');
12
- return;
13
- }
14
-
15
- const contractArtifact = JSON.parse(await fs.readFile(contractArtifactPath, 'utf8'));
16
- const func = contractArtifact.functions.find(f => f.name === functionName);
17
- if (!func) {
18
- console.error(`Function ${functionName} not found in ${contractArtifactPath}`);
19
- return;
20
- }
21
-
22
- const artifact = {
23
- noir_version: contractArtifact.noir_version,
24
- hash: 0,
25
- abi: func.abi,
26
- bytecode: func.bytecode,
27
- debug_symbols: func.debug_symbols,
28
- file_map: contractArtifact.file_map,
29
- expression_width: {
30
- Bounded: {
31
- width: 4,
32
- },
33
- },
34
- };
35
-
36
- const outputDir = path.dirname(contractArtifactPath);
37
- const outputName = path.basename(contractArtifactPath, '.json') + `-${functionName}.json`;
38
-
39
- const outPath = path.join(outputDir, outputName);
40
-
41
- await fs.writeFile(outPath, JSON.stringify(artifact, null, 2));
42
- }
43
-
44
- main().catch(err => {
45
- console.error(err);
46
- process.exit(1);
47
- });