@aztec/end-to-end 0.0.1-commit.e6bd8901 → 0.0.1-commit.ee80a48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dest/e2e_epochs/epochs_test.d.ts +7 -1
  2. package/dest/e2e_epochs/epochs_test.d.ts.map +1 -1
  3. package/dest/e2e_epochs/epochs_test.js +28 -9
  4. package/dest/e2e_l1_publisher/write_json.d.ts +3 -2
  5. package/dest/e2e_l1_publisher/write_json.d.ts.map +1 -1
  6. package/dest/e2e_l1_publisher/write_json.js +1 -7
  7. package/dest/e2e_p2p/reqresp/utils.d.ts +22 -0
  8. package/dest/e2e_p2p/reqresp/utils.d.ts.map +1 -0
  9. package/dest/e2e_p2p/reqresp/utils.js +153 -0
  10. package/dest/e2e_p2p/shared.d.ts +1 -1
  11. package/dest/e2e_p2p/shared.d.ts.map +1 -1
  12. package/dest/e2e_p2p/shared.js +2 -2
  13. package/dest/fixtures/e2e_prover_test.js +1 -1
  14. package/dest/fixtures/ha_setup.d.ts +71 -0
  15. package/dest/fixtures/ha_setup.d.ts.map +1 -0
  16. package/dest/fixtures/ha_setup.js +114 -0
  17. package/dest/fixtures/index.d.ts +2 -1
  18. package/dest/fixtures/index.d.ts.map +1 -1
  19. package/dest/fixtures/index.js +1 -0
  20. package/dest/fixtures/setup.d.ts +3 -3
  21. package/dest/fixtures/setup.d.ts.map +1 -1
  22. package/dest/fixtures/setup.js +20 -15
  23. package/dest/fixtures/setup_p2p_test.d.ts +4 -5
  24. package/dest/fixtures/setup_p2p_test.d.ts.map +1 -1
  25. package/dest/fixtures/setup_p2p_test.js +24 -19
  26. package/dest/spartan/tx_metrics.d.ts +35 -1
  27. package/dest/spartan/tx_metrics.d.ts.map +1 -1
  28. package/dest/spartan/tx_metrics.js +150 -0
  29. package/dest/spartan/utils/config.d.ts +4 -1
  30. package/dest/spartan/utils/config.d.ts.map +1 -1
  31. package/dest/spartan/utils/config.js +2 -1
  32. package/dest/spartan/utils/index.d.ts +4 -4
  33. package/dest/spartan/utils/index.d.ts.map +1 -1
  34. package/dest/spartan/utils/index.js +2 -2
  35. package/dest/spartan/utils/k8s.d.ts +29 -1
  36. package/dest/spartan/utils/k8s.d.ts.map +1 -1
  37. package/dest/spartan/utils/k8s.js +118 -0
  38. package/dest/spartan/utils/nodes.d.ts +11 -1
  39. package/dest/spartan/utils/nodes.d.ts.map +1 -1
  40. package/dest/spartan/utils/nodes.js +198 -27
  41. package/dest/spartan/utils/scripts.d.ts +18 -4
  42. package/dest/spartan/utils/scripts.d.ts.map +1 -1
  43. package/dest/spartan/utils/scripts.js +19 -4
  44. package/package.json +42 -39
  45. package/src/e2e_epochs/epochs_test.ts +31 -10
  46. package/src/e2e_l1_publisher/write_json.ts +1 -6
  47. package/src/e2e_p2p/reqresp/utils.ts +207 -0
  48. package/src/e2e_p2p/shared.ts +10 -2
  49. package/src/fixtures/e2e_prover_test.ts +1 -1
  50. package/src/fixtures/ha_setup.ts +184 -0
  51. package/src/fixtures/index.ts +1 -0
  52. package/src/fixtures/setup.ts +13 -13
  53. package/src/fixtures/setup_p2p_test.ts +15 -20
  54. package/src/spartan/tx_metrics.ts +126 -0
  55. package/src/spartan/utils/config.ts +1 -0
  56. package/src/spartan/utils/index.ts +3 -1
  57. package/src/spartan/utils/k8s.ts +152 -0
  58. package/src/spartan/utils/nodes.ts +239 -24
  59. package/src/spartan/utils/scripts.ts +43 -7
@@ -0,0 +1,207 @@
1
+ import type { AztecNodeService } from '@aztec/aztec-node';
2
+ import { createLogger } from '@aztec/aztec.js/log';
3
+ import { waitForTx } from '@aztec/aztec.js/node';
4
+ import { Tx } from '@aztec/aztec.js/tx';
5
+ import { RollupContract } from '@aztec/ethereum/contracts';
6
+ import { SlotNumber } from '@aztec/foundation/branded-types';
7
+ import { timesAsync } from '@aztec/foundation/collection';
8
+ import { retryUntil } from '@aztec/foundation/retry';
9
+
10
+ import { jest } from '@jest/globals';
11
+ import fs from 'fs';
12
+ import os from 'os';
13
+ import path from 'path';
14
+
15
+ import { shouldCollectMetrics } from '../../fixtures/fixtures.js';
16
+ import { createNodes } from '../../fixtures/setup_p2p_test.js';
17
+ import { P2PNetworkTest, SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES, WAIT_FOR_TX_TIMEOUT } from '../p2p_network.js';
18
+ import { prepareTransactions } from '../shared.js';
19
+
20
+ // Don't set this to a higher value than 9 because each node will use a different L1 publisher account and anvil seeds
21
+ export const NUM_VALIDATORS = 6;
22
+ export const NUM_TXS_PER_NODE = 2;
23
+ export const BOOT_NODE_UDP_PORT = 4500;
24
+
25
+ export const createReqrespDataDir = () => fs.mkdtempSync(path.join(os.tmpdir(), 'reqresp-'));
26
+
27
/** Options controlling how the reqresp test network is configured. */
type ReqrespOptions = {
  /** When true, adds `p2pDisableStatusHandshake: true` to the initial network config. */
  disableStatusHandshake?: boolean;
};
30
+
31
+ export async function createReqrespTest(options: ReqrespOptions = {}): Promise<P2PNetworkTest> {
32
+ const { disableStatusHandshake = false } = options;
33
+ const t = await P2PNetworkTest.create({
34
+ testName: 'e2e_p2p_reqresp_tx',
35
+ numberOfNodes: 0,
36
+ numberOfValidators: NUM_VALIDATORS,
37
+ basePort: BOOT_NODE_UDP_PORT,
38
+ // To collect metrics - run in aztec-packages `docker compose --profile metrics up`
39
+ metricsPort: shouldCollectMetrics(),
40
+ initialConfig: {
41
+ ...SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES,
42
+ aztecSlotDuration: 24,
43
+ ...(disableStatusHandshake ? { p2pDisableStatusHandshake: true } : {}),
44
+ listenAddress: '127.0.0.1',
45
+ aztecEpochDuration: 64, // stable committee
46
+ },
47
+ });
48
+ await t.setup();
49
+ await t.applyBaseSetup();
50
+ return t;
51
+ }
52
+
53
+ export async function cleanupReqrespTest(params: { t: P2PNetworkTest; nodes?: AztecNodeService[]; dataDir: string }) {
54
+ const { t, nodes, dataDir } = params;
55
+ if (nodes) {
56
+ await t.stopNodes(nodes);
57
+ }
58
+ await t.teardown();
59
+ for (let i = 0; i < NUM_VALIDATORS; i++) {
60
+ fs.rmSync(`${dataDir}-${i}`, { recursive: true, force: true, maxRetries: 3 });
61
+ }
62
+ }
63
+
64
+ const getNodePort = (nodeIndex: number) => BOOT_NODE_UDP_PORT + 1 + nodeIndex;
65
+
66
/**
 * Runs the reqresp tx scenario: spins up NUM_VALIDATORS validator nodes, disables
 * storage of gossiped txs on every non-proposer node, submits txs directly to the
 * upcoming proposers, and waits for all txs to be mined — exercising the tx
 * req/resp retrieval path (per the test's name; confirm against the p2p service).
 *
 * @param params.t - The P2P network test fixture (already set up).
 * @param params.dataDir - Base directory for per-node data dirs.
 * @param params.disableStatusHandshake - When true, nodes run with `p2pDisableStatusHandshake`.
 * @returns The created validator nodes; callers are responsible for stopping them.
 */
export async function runReqrespTxTest(params: {
  t: P2PNetworkTest;
  dataDir: string;
  disableStatusHandshake?: boolean;
}): Promise<AztecNodeService[]> {
  const { t, dataDir, disableStatusHandshake = false } = params;

  // Validators discover each other through the bootstrap ENR; without it we cannot build the swarm.
  if (!t.bootstrapNodeEnr) {
    throw new Error('Bootstrap node ENR is not available');
  }

  t.logger.info('Creating nodes');
  const aztecNodeConfig = disableStatusHandshake
    ? { ...t.ctx.aztecNodeConfig, p2pDisableStatusHandshake: true }
    : t.ctx.aztecNodeConfig;

  const nodes = await createNodes(
    aztecNodeConfig,
    t.ctx.dateProvider!,
    t.bootstrapNodeEnr,
    NUM_VALIDATORS,
    BOOT_NODE_UDP_PORT,
    t.prefilledPublicData,
    dataDir,
    shouldCollectMetrics(),
  );

  t.logger.info('Waiting for nodes to connect');
  await t.waitForP2PMeshConnectivity(nodes, NUM_VALIDATORS);

  await t.setupAccount();

  // Block until every validator has synced at least to the block the main node is on.
  const targetBlockNumber = await t.ctx.aztecNodeService!.getBlockNumber();
  await retryUntil(
    async () => {
      const blockNumbers = await Promise.all(nodes.map(node => node.getBlockNumber()));
      return blockNumbers.every(blockNumber => blockNumber >= targetBlockNumber) ? true : undefined;
    },
    `validators to sync to L2 block ${targetBlockNumber}`,
    60,
    0.5,
  );

  t.logger.info('Preparing transactions to send');
  // Two batches of NUM_TXS_PER_NODE txs each; one batch per upcoming proposer.
  const txBatches = await timesAsync(2, () =>
    prepareTransactions(t.logger, t.ctx.aztecNodeService!, NUM_TXS_PER_NODE, t.fundedAccount),
  );

  t.logger.info('Removing initial node');
  await t.removeInitialNode();

  t.logger.info('Starting fresh slot');
  const [timestamp] = await t.ctx.cheatCodes.rollup.advanceToNextSlot();
  // Align the test clock with the new slot so all validators agree on the current slot.
  t.ctx.dateProvider!.setTime(Number(timestamp) * 1000);
  const startSlotTimestamp = BigInt(timestamp);

  const { proposerIndexes, nodesToTurnOffTxGossip } = await getProposerIndexes(t, startSlotTimestamp);
  t.logger.info(`Turning off tx gossip for nodes: ${nodesToTurnOffTxGossip.map(getNodePort)}`);
  t.logger.info(`Sending txs to proposer nodes: ${proposerIndexes.map(getNodePort)}`);

  // Replace the p2p node implementation of some of the nodes with a spy such that it does not store transactions that are gossiped to it
  // Original implementation of `handleGossipedTx` will store received transactions in the tx pool.
  // We chose the first 2 nodes that will be the proposers for the next few slots
  for (const nodeIndex of nodesToTurnOffTxGossip) {
    const logger = createLogger(`p2p:${getNodePort(nodeIndex)}`);
    jest.spyOn((nodes[nodeIndex] as any).p2pClient.p2pService, 'handleGossipedTx').mockImplementation(((
      payloadData: Buffer,
    ) => {
      // Decode the gossiped tx only to log its hash; deliberately skip storing it.
      const txHash = Tx.fromBuffer(payloadData).getTxHash();
      logger.info(`Skipping storage of gossiped transaction ${txHash.toString()}`);
      return Promise.resolve();
    }) as any);
  }

  // We send the tx to the proposer nodes directly, ignoring the pxe and node in each context
  // We cannot just call tx.send since they were created using a pxe wired to the first node which is now stopped
  t.logger.info('Sending transactions through proposer nodes');
  const submittedTxs = await Promise.all(
    txBatches.map(async (batch, batchIndex) => {
      const proposerNode = nodes[proposerIndexes[batchIndex]];
      await Promise.all(
        batch.map(async tx => {
          try {
            await proposerNode.sendTx(tx);
          } catch (err) {
            t.logger.error(`Error sending tx: ${err}`);
            throw err;
          }
        }),
      );
      // Remember which node each tx went to, so we can poll that same node for inclusion below.
      return batch.map(tx => ({ node: proposerNode, txHash: tx.getTxHash() }));
    }),
  );

  t.logger.info('Waiting for all transactions to be mined');
  await Promise.all(
    submittedTxs.flatMap((batch, batchIndex) =>
      batch.map(async (submittedTx, txIndex) => {
        t.logger.info(`Waiting for tx ${batchIndex}-${txIndex} ${submittedTx.txHash.toString()} to be mined`);
        // Extended timeout (1.5x): txs may need extra propagation before inclusion in this scenario.
        await waitForTx(submittedTx.node, submittedTx.txHash, { timeout: WAIT_FOR_TX_TIMEOUT * 1.5 });
        t.logger.info(`Tx ${batchIndex}-${txIndex} ${submittedTx.txHash.toString()} has been mined`);
      }),
    ),
  );

  t.logger.info('All transactions mined');

  return nodes;
}
175
+
176
/**
 * Determines which validator indexes propose in the 3 slots starting at the given
 * timestamp, by reading the attester set and per-slot proposers from the rollup contract.
 *
 * NOTE(review): `startSlot + i` adds a number index to whatever `getSlotAt` returns —
 * if that is a bigint, mixing bigint and number throws at runtime; confirm the return type.
 *
 * @param t - The P2P network test fixture (provides L1 client and contract addresses).
 * @param startSlotTimestamp - L1 timestamp of the first slot to inspect.
 * @returns Proposer indexes (into the attester set) and the complement set of node
 *          indexes whose tx gossip the test turns off.
 */
async function getProposerIndexes(t: P2PNetworkTest, startSlotTimestamp: bigint) {
  // Get the nodes for the next set of slots
  const rollupContract = new RollupContract(
    t.ctx.deployL1ContractsValues.l1Client,
    t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress,
  );

  const attesters = await rollupContract.getAttesters();
  const startSlot = await rollupContract.getSlotAt(startSlotTimestamp);

  const proposers = await Promise.all(
    Array.from({ length: 3 }, async (_, i) => {
      const slot = SlotNumber(startSlot + i);
      const slotTimestamp = await rollupContract.getTimestampForSlot(slot);
      return await rollupContract.getProposerAt(slotTimestamp);
    }),
  );
  // Get the indexes of the nodes that are responsible for the next two slots
  const proposerIndexes = proposers.map(proposer => attesters.findIndex(a => a.equals(proposer)));

  // A -1 here means a proposer is not in the attester set we created — fail loudly.
  if (proposerIndexes.some(i => i === -1)) {
    throw new Error(
      `Proposer index not found for proposer ` +
        `(proposers=${proposers.map(p => p.toString()).join(',')}, indices=${proposerIndexes.join(',')})`,
    );
  }

  const nodesToTurnOffTxGossip = Array.from({ length: NUM_VALIDATORS }, (_, i) => i).filter(
    i => !proposerIndexes.includes(i),
  );
  return { proposerIndexes, nodesToTurnOffTxGossip };
}
@@ -56,7 +56,11 @@ export const submitTransactions = async (
56
56
  ): Promise<TxHash[]> => {
57
57
  const rpcConfig = getRpcConfig();
58
58
  rpcConfig.proverEnabled = false;
59
- const wallet = await TestWallet.create(node, { ...getPXEConfig(), proverEnabled: false }, { useLogSuffix: true });
59
+ const wallet = await TestWallet.create(
60
+ node,
61
+ { ...getPXEConfig(), proverEnabled: false },
62
+ { loggerActorLabel: 'pxe-tx' },
63
+ );
60
64
  const fundedAccountManager = await wallet.createSchnorrAccount(fundedAccount.secret, fundedAccount.salt);
61
65
  return submitTxsTo(wallet, fundedAccountManager.address, numTxs, logger);
62
66
  };
@@ -70,7 +74,11 @@ export async function prepareTransactions(
70
74
  const rpcConfig = getRpcConfig();
71
75
  rpcConfig.proverEnabled = false;
72
76
 
73
- const wallet = await TestWallet.create(node, { ...getPXEConfig(), proverEnabled: false }, { useLogSuffix: true });
77
+ const wallet = await TestWallet.create(
78
+ node,
79
+ { ...getPXEConfig(), proverEnabled: false },
80
+ { loggerActorLabel: 'pxe-tx' },
81
+ );
74
82
  const fundedAccountManager = await wallet.createSchnorrAccount(fundedAccount.secret, fundedAccount.salt);
75
83
 
76
84
  const testContractInstance = await getContractInstanceFromInstantiationParams(TestContractArtifact, {
@@ -198,7 +198,7 @@ export class FullProverTest {
198
198
  this.aztecNode,
199
199
  { proverEnabled: this.realProofs },
200
200
  undefined,
201
- true,
201
+ 'pxe-proven',
202
202
  );
203
203
  this.logger.debug(`Contract address ${this.fakeProofsAsset.address}`);
204
204
  await provenWallet.registerContract(this.fakeProofsAssetInstance, TokenContract.artifact);
@@ -0,0 +1,184 @@
1
+ import { EthAddress } from '@aztec/aztec.js/addresses';
2
+ import { Fr } from '@aztec/aztec.js/fields';
3
+ import type { Logger } from '@aztec/aztec.js/log';
4
+ import { SecretValue } from '@aztec/foundation/config';
5
+
6
+ import { Pool } from 'pg';
7
+ import { privateKeyToAccount } from 'viem/accounts';
8
+
9
/**
 * Configuration for the HA (high-availability) validator database connection
 * used by the HA signing test fixtures.
 */
export interface HADatabaseConfig {
  /** PostgreSQL connection URL (e.g. `postgresql://user:pass@host:port/db`). */
  databaseUrl: string;
  /** Identifier of this node, used for HA duty coordination rows. */
  nodeId: string;
  /** Whether HA signing coordination is enabled. */
  haSigningEnabled: boolean;
  /** How often to poll for duties, in milliseconds. */
  pollingIntervalMs: number;
  /** Timeout for a signing operation, in milliseconds. */
  signingTimeoutMs: number;
  /** Maximum age before a duty is considered stuck, in milliseconds. */
  maxStuckDutiesAgeMs: number;
}
26
+
27
+ /**
28
+ * Get database configuration from environment variables
29
+ */
30
+ export function createHADatabaseConfig(nodeId: string): HADatabaseConfig {
31
+ const databaseUrl = process.env.DATABASE_URL || 'postgresql://aztec:aztec@localhost:5432/aztec_ha_test';
32
+
33
+ return {
34
+ databaseUrl,
35
+ nodeId,
36
+ haSigningEnabled: true,
37
+ pollingIntervalMs: 100,
38
+ signingTimeoutMs: 3000,
39
+ maxStuckDutiesAgeMs: 72000,
40
+ };
41
+ }
42
+
43
+ /**
44
+ * Setup PostgreSQL database connection pool for HA tests
45
+ *
46
+ * Note: Database migrations should be run separately before starting tests,
47
+ * either via docker-compose entrypoint or manually with: aztec migrate-ha-db up
48
+ */
49
+ export function setupHADatabase(databaseUrl: string, logger?: Logger): Pool {
50
+ try {
51
+ // Create connection pool for test usage
52
+ // Migrations are already run by docker-compose entrypoint before tests start
53
+ const pool = new Pool({ connectionString: databaseUrl });
54
+
55
+ logger?.info('Connected to HA database (migrations should already be applied)');
56
+
57
+ return pool;
58
+ } catch (error) {
59
+ logger?.error(`Failed to connect to HA database: ${error}`);
60
+ throw error;
61
+ }
62
+ }
63
+
64
+ /**
65
+ * Clean up HA database - drop all tables
66
+ * Use this between tests to ensure clean state
67
+ */
68
+ export async function cleanupHADatabase(pool: Pool, logger?: Logger): Promise<void> {
69
+ try {
70
+ // Drop all HA tables
71
+ await pool.query('DROP TABLE IF EXISTS validator_duties CASCADE');
72
+ await pool.query('DROP TABLE IF EXISTS slashing_protection CASCADE');
73
+ await pool.query('DROP TABLE IF EXISTS schema_version CASCADE');
74
+
75
+ logger?.info('HA database cleaned up successfully');
76
+ } catch (error) {
77
+ logger?.error(`Failed to cleanup HA database: ${error}`);
78
+ throw error;
79
+ }
80
+ }
81
+
82
+ /**
83
+ * Query validator duties from the database
84
+ */
85
+ export async function getValidatorDuties(
86
+ pool: Pool,
87
+ slot: bigint,
88
+ dutyType?: 'ATTESTATION' | 'BLOCK_PROPOSAL' | 'GOVERNANCE_VOTE' | 'SLASHING_VOTE',
89
+ ): Promise<
90
+ Array<{
91
+ slot: string;
92
+ dutyType: string;
93
+ validatorAddress: string;
94
+ nodeId: string;
95
+ startedAt: Date;
96
+ completedAt: Date | undefined;
97
+ }>
98
+ > {
99
+ const query = dutyType
100
+ ? 'SELECT slot, duty_type, validator_address, node_id, started_at, completed_at FROM validator_duties WHERE slot = $1 AND duty_type = $2 ORDER BY started_at'
101
+ : 'SELECT slot, duty_type, validator_address, node_id, started_at, completed_at FROM validator_duties WHERE slot = $1 ORDER BY started_at';
102
+
103
+ const params = dutyType ? [slot.toString(), dutyType] : [slot.toString()];
104
+
105
+ const result = await pool.query<{
106
+ slot: string;
107
+ duty_type: string;
108
+ validator_address: string;
109
+ node_id: string;
110
+ started_at: Date;
111
+ completed_at: Date | undefined;
112
+ }>(query, params);
113
+
114
+ return result.rows.map(row => ({
115
+ slot: row.slot,
116
+ dutyType: row.duty_type,
117
+ validatorAddress: row.validator_address,
118
+ nodeId: row.node_id,
119
+ startedAt: row.started_at,
120
+ completedAt: row.completed_at,
121
+ }));
122
+ }
123
+
124
+ /**
125
+ * Convert private keys to Ethereum addresses
126
+ */
127
+ export function getAddressesFromPrivateKeys(privateKeys: `0x${string}`[]): string[] {
128
+ return privateKeys.map(pk => {
129
+ const account = privateKeyToAccount(pk);
130
+ return account.address;
131
+ });
132
+ }
133
+
134
+ /**
135
+ * Create initial validators from private keys for L1 contract deployment
136
+ */
137
+ export function createInitialValidatorsFromPrivateKeys(attesterPrivateKeys: `0x${string}`[]): Array<{
138
+ attester: EthAddress;
139
+ withdrawer: EthAddress;
140
+ privateKey: `0x${string}`;
141
+ bn254SecretKey: SecretValue<bigint>;
142
+ }> {
143
+ return attesterPrivateKeys.map(pk => {
144
+ const account = privateKeyToAccount(pk);
145
+ return {
146
+ attester: EthAddress.fromString(account.address),
147
+ withdrawer: EthAddress.fromString(account.address),
148
+ privateKey: pk,
149
+ bn254SecretKey: new SecretValue(Fr.random().toBigInt()),
150
+ };
151
+ });
152
+ }
153
+
154
+ /**
155
+ * Verify no duplicate attestations per validator (HA coordination check)
156
+ * Groups duties by validator address and verifies each validator attested exactly once
157
+ */
158
+ export function verifyNoDuplicateAttestations(
159
+ attestationDuties: Array<{
160
+ validatorAddress: string;
161
+ nodeId: string;
162
+ completedAt: Date | undefined;
163
+ }>,
164
+ logger?: Logger,
165
+ ): Map<string, typeof attestationDuties> {
166
+ const dutiesByValidator = new Map<string, typeof attestationDuties>();
167
+ for (const duty of attestationDuties) {
168
+ const existing = dutiesByValidator.get(duty.validatorAddress) || [];
169
+ existing.push(duty);
170
+ dutiesByValidator.set(duty.validatorAddress, existing);
171
+ }
172
+
173
+ for (const [validatorAddress, validatorDuties] of dutiesByValidator.entries()) {
174
+ if (validatorDuties.length !== 1) {
175
+ throw new Error(`Validator ${validatorAddress} attested ${validatorDuties.length} times (expected exactly once)`);
176
+ }
177
+ if (!validatorDuties[0].completedAt) {
178
+ throw new Error(`Validator ${validatorAddress} attestation duty not completed`);
179
+ }
180
+ logger?.info(`Validator ${validatorAddress} attested once via node ${validatorDuties[0].nodeId}`);
181
+ }
182
+
183
+ return dutiesByValidator;
184
+ }
@@ -1,4 +1,5 @@
1
1
  export * from './fixtures.js';
2
+ export * from './ha_setup.js';
2
3
  export * from './logging.js';
3
4
  export * from './utils.js';
4
5
  export * from './token_utils.js';
@@ -41,7 +41,7 @@ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
41
41
  import { SecretValue } from '@aztec/foundation/config';
42
42
  import { randomBytes } from '@aztec/foundation/crypto/random';
43
43
  import { tryRmDir } from '@aztec/foundation/fs';
44
- import { withLogNameSuffix } from '@aztec/foundation/log';
44
+ import { withLoggerBindings } from '@aztec/foundation/log/server';
45
45
  import { retryUntil } from '@aztec/foundation/retry';
46
46
  import { sleep } from '@aztec/foundation/sleep';
47
47
  import { DateProvider, TestDateProvider } from '@aztec/foundation/timer';
@@ -125,14 +125,14 @@ export async function setupSharedBlobStorage(config: { dataDirectory?: string }
125
125
  * @param aztecNode - An instance of Aztec Node.
126
126
  * @param opts - Partial configuration for the PXE.
127
127
  * @param logger - The logger to be used.
128
- * @param useLogSuffix - Whether to add a randomly generated suffix to the PXE debug logs.
128
+ * @param actor - Actor label to include in log output (e.g., 'pxe-test').
129
129
  * @returns A test wallet, logger and teardown function.
130
130
  */
131
131
  export async function setupPXEAndGetWallet(
132
132
  aztecNode: AztecNode,
133
133
  opts: Partial<PXEConfig> = {},
134
134
  logger = getLogger(),
135
- useLogSuffix = false,
135
+ actor?: string,
136
136
  ): Promise<{
137
137
  wallet: TestWallet;
138
138
  logger: Logger;
@@ -150,9 +150,7 @@ export async function setupPXEAndGetWallet(
150
150
 
151
151
  const teardown = configuredDataDirectory ? () => Promise.resolve() : () => tryRmDir(PXEConfig.dataDirectory!);
152
152
 
153
- const wallet = await TestWallet.create(aztecNode, PXEConfig, {
154
- useLogSuffix,
155
- });
153
+ const wallet = await TestWallet.create(aztecNode, PXEConfig, { loggerActorLabel: actor });
156
154
 
157
155
  return {
158
156
  wallet,
@@ -392,7 +390,7 @@ export async function setup(
392
390
  const res = await startAnvil({
393
391
  l1BlockTime: opts.ethereumSlotDuration,
394
392
  accounts: opts.anvilAccounts,
395
- port: opts.anvilPort,
393
+ port: opts.anvilPort ?? (process.env.ANVIL_PORT ? parseInt(process.env.ANVIL_PORT) : undefined),
396
394
  });
397
395
  anvil = res.anvil;
398
396
  config.l1RpcUrls = [res.rpcUrl];
@@ -574,10 +572,12 @@ export async function setup(
574
572
  }
575
573
  }
576
574
 
577
- const aztecNodeService = await AztecNodeService.createAndSync(
578
- config,
579
- { dateProvider, telemetry: telemetryClient, p2pClientDeps, logger: createLogger('node:MAIN-aztec-node') },
580
- { prefilledPublicData },
575
+ const aztecNodeService = await withLoggerBindings({ actor: 'node-0' }, () =>
576
+ AztecNodeService.createAndSync(
577
+ config,
578
+ { dateProvider, telemetry: telemetryClient, p2pClientDeps },
579
+ { prefilledPublicData },
580
+ ),
581
581
  );
582
582
  const sequencerClient = aztecNodeService.getSequencer();
583
583
 
@@ -611,7 +611,7 @@ export async function setup(
611
611
  pxeConfig.dataDirectory = path.join(directoryToCleanup, randomBytes(8).toString('hex'));
612
612
  // For tests we only want proving enabled if specifically requested
613
613
  pxeConfig.proverEnabled = !!pxeOpts.proverEnabled;
614
- const wallet = await TestWallet.create(aztecNodeService, pxeConfig);
614
+ const wallet = await TestWallet.create(aztecNodeService, pxeConfig, { loggerActorLabel: 'pxe-0' });
615
615
 
616
616
  if (opts.walletMinFeePadding !== undefined) {
617
617
  wallet.setMinFeePadding(opts.walletMinFeePadding);
@@ -797,7 +797,7 @@ export function createAndSyncProverNode(
797
797
  prefilledPublicData: PublicDataTreeLeaf[] = [],
798
798
  proverNodeDeps: ProverNodeDeps = {},
799
799
  ) {
800
- return withLogNameSuffix('prover-node', async () => {
800
+ return withLoggerBindings({ actor: 'prover-0' }, async () => {
801
801
  const aztecNodeTxProvider = aztecNode && {
802
802
  getTxByHash: aztecNode.getTxByHash.bind(aztecNode),
803
803
  getTxsByHash: aztecNode.getTxsByHash.bind(aztecNode),
@@ -4,14 +4,13 @@
4
4
  import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node';
5
5
  import { range } from '@aztec/foundation/array';
6
6
  import { SecretValue } from '@aztec/foundation/config';
7
- import { addLogNameHandler, removeLogNameHandler } from '@aztec/foundation/log';
7
+ import { withLoggerBindings } from '@aztec/foundation/log/server';
8
8
  import { bufferToHex } from '@aztec/foundation/string';
9
9
  import type { DateProvider } from '@aztec/foundation/timer';
10
10
  import type { ProverNodeConfig, ProverNodeDeps } from '@aztec/prover-node';
11
11
  import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
12
12
 
13
13
  import getPort from 'get-port';
14
- import { AsyncLocalStorage } from 'node:async_hooks';
15
14
 
16
15
  import { TEST_PEER_CHECK_INTERVAL_MS } from './fixtures.js';
17
16
  import { createAndSyncProverNode, getPrivateKeyFromIndex } from './utils.js';
@@ -22,6 +21,11 @@ import { getEndToEndTestTelemetryClient } from './with_telemetry_utils.js';
22
21
  // to avoid running validators with the same key
23
22
  export const ATTESTER_PRIVATE_KEYS_START_INDEX = 3;
24
23
 
24
+ // Global counters for actor naming (start at 1)
25
+ let validatorCounter = 1;
26
+ let nodeCounter = 1;
27
+ let proverCounter = 1;
28
+
25
29
  export function generatePrivateKeys(startIndex: number, numberOfKeys: number): `0x${string}`[] {
26
30
  const privateKeys: `0x${string}`[] = [];
27
31
  // Do not start from 0 as it is used during setup
@@ -44,10 +48,6 @@ export async function createNodes(
44
48
  validatorsPerNode = 1,
45
49
  ): Promise<AztecNodeService[]> {
46
50
  const nodePromises: Promise<AztecNodeService>[] = [];
47
- const loggerIdStorage = new AsyncLocalStorage<string>();
48
- const logNameHandler = (module: string) =>
49
- loggerIdStorage.getStore() ? `${module}:${loggerIdStorage.getStore()}` : module;
50
- addLogNameHandler(logNameHandler);
51
51
 
52
52
  for (let i = 0; i < numNodes; i++) {
53
53
  const index = indexOffset + i;
@@ -69,7 +69,6 @@ export async function createNodes(
69
69
  prefilledPublicData,
70
70
  dataDir,
71
71
  metricsPort,
72
- loggerIdStorage,
73
72
  );
74
73
  nodePromises.push(nodePromise);
75
74
  }
@@ -81,7 +80,6 @@ export async function createNodes(
81
80
  throw new Error('Sequencer not found');
82
81
  }
83
82
 
84
- removeLogNameHandler(logNameHandler);
85
83
  return nodes;
86
84
  }
87
85
 
@@ -95,9 +93,9 @@ export async function createNode(
95
93
  prefilledPublicData?: PublicDataTreeLeaf[],
96
94
  dataDirectory?: string,
97
95
  metricsPort?: number,
98
- loggerIdStorage?: AsyncLocalStorage<string>,
99
96
  ) {
100
- const createNode = async () => {
97
+ const actorIndex = validatorCounter++;
98
+ return await withLoggerBindings({ actor: `validator-${actorIndex}` }, async () => {
101
99
  const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, addressIndex, dataDirectory);
102
100
  const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
103
101
  return await AztecNodeService.createAndSync(
@@ -105,8 +103,7 @@ export async function createNode(
105
103
  { telemetry, dateProvider },
106
104
  { prefilledPublicData, dontStartSequencer: config.dontStartSequencer },
107
105
  );
108
- };
109
- return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createNode) : createNode();
106
+ });
110
107
  }
111
108
 
112
109
  /** Creates a P2P enabled instance of Aztec Node Service without a validator */
@@ -118,9 +115,9 @@ export async function createNonValidatorNode(
118
115
  prefilledPublicData?: PublicDataTreeLeaf[],
119
116
  dataDirectory?: string,
120
117
  metricsPort?: number,
121
- loggerIdStorage?: AsyncLocalStorage<string>,
122
118
  ) {
123
- const createNode = async () => {
119
+ const actorIndex = nodeCounter++;
120
+ return await withLoggerBindings({ actor: `node-${actorIndex}` }, async () => {
124
121
  const p2pConfig = await createP2PConfig(baseConfig, bootstrapNode, tcpPort, dataDirectory);
125
122
  const config: AztecNodeConfig = {
126
123
  ...p2pConfig,
@@ -130,8 +127,7 @@ export async function createNonValidatorNode(
130
127
  };
131
128
  const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
132
129
  return await AztecNodeService.createAndSync(config, { telemetry, dateProvider }, { prefilledPublicData });
133
- };
134
- return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createNode) : createNode();
130
+ });
135
131
  }
136
132
 
137
133
  export async function createProverNode(
@@ -143,9 +139,9 @@ export async function createProverNode(
143
139
  prefilledPublicData?: PublicDataTreeLeaf[],
144
140
  dataDirectory?: string,
145
141
  metricsPort?: number,
146
- loggerIdStorage?: AsyncLocalStorage<string>,
147
142
  ) {
148
- const createProverNode = async () => {
143
+ const actorIndex = proverCounter++;
144
+ return await withLoggerBindings({ actor: `prover-${actorIndex}` }, async () => {
149
145
  const proverNodePrivateKey = getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + addressIndex)!;
150
146
  const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
151
147
 
@@ -165,8 +161,7 @@ export async function createProverNode(
165
161
  prefilledPublicData,
166
162
  { ...proverNodeDeps, telemetry },
167
163
  );
168
- };
169
- return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createProverNode) : createProverNode();
164
+ });
170
165
  }
171
166
 
172
167
  export async function createP2PConfig(