@aztec/end-to-end 0.0.1-commit.2ed92850 → 0.0.1-commit.43597cc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts +1 -1
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts.map +1 -1
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.js +3 -2
- package/dest/e2e_epochs/epochs_test.d.ts +7 -1
- package/dest/e2e_epochs/epochs_test.d.ts.map +1 -1
- package/dest/e2e_epochs/epochs_test.js +30 -10
- package/dest/e2e_l1_publisher/write_json.d.ts +3 -2
- package/dest/e2e_l1_publisher/write_json.d.ts.map +1 -1
- package/dest/e2e_l1_publisher/write_json.js +1 -7
- package/dest/e2e_p2p/reqresp/utils.d.ts +22 -0
- package/dest/e2e_p2p/reqresp/utils.d.ts.map +1 -0
- package/dest/e2e_p2p/reqresp/utils.js +153 -0
- package/dest/e2e_p2p/shared.d.ts +1 -1
- package/dest/e2e_p2p/shared.d.ts.map +1 -1
- package/dest/e2e_p2p/shared.js +5 -2
- package/dest/fixtures/e2e_prover_test.js +1 -1
- package/dest/fixtures/ha_setup.d.ts +71 -0
- package/dest/fixtures/ha_setup.d.ts.map +1 -0
- package/dest/fixtures/ha_setup.js +114 -0
- package/dest/fixtures/index.d.ts +2 -1
- package/dest/fixtures/index.d.ts.map +1 -1
- package/dest/fixtures/index.js +1 -0
- package/dest/fixtures/setup.d.ts +3 -3
- package/dest/fixtures/setup.d.ts.map +1 -1
- package/dest/fixtures/setup.js +20 -15
- package/dest/fixtures/setup_p2p_test.d.ts +12 -8
- package/dest/fixtures/setup_p2p_test.d.ts.map +1 -1
- package/dest/fixtures/setup_p2p_test.js +29 -21
- package/dest/shared/uniswap_l1_l2.d.ts +1 -1
- package/dest/shared/uniswap_l1_l2.d.ts.map +1 -1
- package/dest/shared/uniswap_l1_l2.js +7 -5
- package/dest/spartan/tx_metrics.d.ts +35 -1
- package/dest/spartan/tx_metrics.d.ts.map +1 -1
- package/dest/spartan/tx_metrics.js +150 -0
- package/dest/spartan/utils/config.d.ts +4 -1
- package/dest/spartan/utils/config.d.ts.map +1 -1
- package/dest/spartan/utils/config.js +2 -1
- package/dest/spartan/utils/index.d.ts +4 -4
- package/dest/spartan/utils/index.d.ts.map +1 -1
- package/dest/spartan/utils/index.js +2 -2
- package/dest/spartan/utils/k8s.d.ts +29 -1
- package/dest/spartan/utils/k8s.d.ts.map +1 -1
- package/dest/spartan/utils/k8s.js +118 -0
- package/dest/spartan/utils/nodes.d.ts +11 -1
- package/dest/spartan/utils/nodes.d.ts.map +1 -1
- package/dest/spartan/utils/nodes.js +198 -27
- package/dest/spartan/utils/scripts.d.ts +18 -4
- package/dest/spartan/utils/scripts.d.ts.map +1 -1
- package/dest/spartan/utils/scripts.js +19 -4
- package/package.json +42 -39
- package/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +3 -4
- package/src/e2e_epochs/epochs_test.ts +32 -10
- package/src/e2e_l1_publisher/write_json.ts +1 -6
- package/src/e2e_p2p/reqresp/utils.ts +207 -0
- package/src/e2e_p2p/shared.ts +11 -2
- package/src/fixtures/dumps/epoch_proof_result.json +1 -1
- package/src/fixtures/e2e_prover_test.ts +1 -1
- package/src/fixtures/ha_setup.ts +184 -0
- package/src/fixtures/index.ts +1 -0
- package/src/fixtures/setup.ts +13 -13
- package/src/fixtures/setup_p2p_test.ts +31 -27
- package/src/shared/uniswap_l1_l2.ts +7 -9
- package/src/spartan/tx_metrics.ts +126 -0
- package/src/spartan/utils/config.ts +1 -0
- package/src/spartan/utils/index.ts +3 -1
- package/src/spartan/utils/k8s.ts +152 -0
- package/src/spartan/utils/nodes.ts +239 -24
- package/src/spartan/utils/scripts.ts +43 -7
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
import { EthAddress } from '@aztec/aztec.js/addresses';
|
|
2
|
+
import { Fr } from '@aztec/aztec.js/fields';
|
|
3
|
+
import type { Logger } from '@aztec/aztec.js/log';
|
|
4
|
+
import { SecretValue } from '@aztec/foundation/config';
|
|
5
|
+
|
|
6
|
+
import { Pool } from 'pg';
|
|
7
|
+
import { privateKeyToAccount } from 'viem/accounts';
|
|
8
|
+
|
|
9
|
+
/**
 * Configuration for HA database connection.
 *
 * Produced by {@link createHADatabaseConfig}; all durations are in milliseconds.
 */
export interface HADatabaseConfig {
  /** PostgreSQL connection URL (e.g. postgresql://user:pass@host:5432/db). */
  databaseUrl: string;
  /** Identifier of this node, used to attribute duties during HA coordination. */
  nodeId: string;
  /** Whether HA-coordinated signing is enabled. */
  haSigningEnabled: boolean;
  /** How often to poll the database for duty changes, in ms. */
  pollingIntervalMs: number;
  /** Maximum time allowed for a signing operation, in ms. */
  signingTimeoutMs: number;
  /** Age after which an incomplete duty is considered stuck, in ms. */
  maxStuckDutiesAgeMs: number;
}
|
|
26
|
+
|
|
27
|
+
/**
|
|
28
|
+
* Get database configuration from environment variables
|
|
29
|
+
*/
|
|
30
|
+
export function createHADatabaseConfig(nodeId: string): HADatabaseConfig {
|
|
31
|
+
const databaseUrl = process.env.DATABASE_URL || 'postgresql://aztec:aztec@localhost:5432/aztec_ha_test';
|
|
32
|
+
|
|
33
|
+
return {
|
|
34
|
+
databaseUrl,
|
|
35
|
+
nodeId,
|
|
36
|
+
haSigningEnabled: true,
|
|
37
|
+
pollingIntervalMs: 100,
|
|
38
|
+
signingTimeoutMs: 3000,
|
|
39
|
+
maxStuckDutiesAgeMs: 72000,
|
|
40
|
+
};
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Setup PostgreSQL database connection pool for HA tests
|
|
45
|
+
*
|
|
46
|
+
* Note: Database migrations should be run separately before starting tests,
|
|
47
|
+
* either via docker-compose entrypoint or manually with: aztec migrate-ha-db up
|
|
48
|
+
*/
|
|
49
|
+
export function setupHADatabase(databaseUrl: string, logger?: Logger): Pool {
|
|
50
|
+
try {
|
|
51
|
+
// Create connection pool for test usage
|
|
52
|
+
// Migrations are already run by docker-compose entrypoint before tests start
|
|
53
|
+
const pool = new Pool({ connectionString: databaseUrl });
|
|
54
|
+
|
|
55
|
+
logger?.info('Connected to HA database (migrations should already be applied)');
|
|
56
|
+
|
|
57
|
+
return pool;
|
|
58
|
+
} catch (error) {
|
|
59
|
+
logger?.error(`Failed to connect to HA database: ${error}`);
|
|
60
|
+
throw error;
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Clean up HA database - drop all tables
|
|
66
|
+
* Use this between tests to ensure clean state
|
|
67
|
+
*/
|
|
68
|
+
export async function cleanupHADatabase(pool: Pool, logger?: Logger): Promise<void> {
|
|
69
|
+
try {
|
|
70
|
+
// Drop all HA tables
|
|
71
|
+
await pool.query('DROP TABLE IF EXISTS validator_duties CASCADE');
|
|
72
|
+
await pool.query('DROP TABLE IF EXISTS slashing_protection CASCADE');
|
|
73
|
+
await pool.query('DROP TABLE IF EXISTS schema_version CASCADE');
|
|
74
|
+
|
|
75
|
+
logger?.info('HA database cleaned up successfully');
|
|
76
|
+
} catch (error) {
|
|
77
|
+
logger?.error(`Failed to cleanup HA database: ${error}`);
|
|
78
|
+
throw error;
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
/**
|
|
83
|
+
* Query validator duties from the database
|
|
84
|
+
*/
|
|
85
|
+
export async function getValidatorDuties(
|
|
86
|
+
pool: Pool,
|
|
87
|
+
slot: bigint,
|
|
88
|
+
dutyType?: 'ATTESTATION' | 'BLOCK_PROPOSAL' | 'GOVERNANCE_VOTE' | 'SLASHING_VOTE',
|
|
89
|
+
): Promise<
|
|
90
|
+
Array<{
|
|
91
|
+
slot: string;
|
|
92
|
+
dutyType: string;
|
|
93
|
+
validatorAddress: string;
|
|
94
|
+
nodeId: string;
|
|
95
|
+
startedAt: Date;
|
|
96
|
+
completedAt: Date | undefined;
|
|
97
|
+
}>
|
|
98
|
+
> {
|
|
99
|
+
const query = dutyType
|
|
100
|
+
? 'SELECT slot, duty_type, validator_address, node_id, started_at, completed_at FROM validator_duties WHERE slot = $1 AND duty_type = $2 ORDER BY started_at'
|
|
101
|
+
: 'SELECT slot, duty_type, validator_address, node_id, started_at, completed_at FROM validator_duties WHERE slot = $1 ORDER BY started_at';
|
|
102
|
+
|
|
103
|
+
const params = dutyType ? [slot.toString(), dutyType] : [slot.toString()];
|
|
104
|
+
|
|
105
|
+
const result = await pool.query<{
|
|
106
|
+
slot: string;
|
|
107
|
+
duty_type: string;
|
|
108
|
+
validator_address: string;
|
|
109
|
+
node_id: string;
|
|
110
|
+
started_at: Date;
|
|
111
|
+
completed_at: Date | undefined;
|
|
112
|
+
}>(query, params);
|
|
113
|
+
|
|
114
|
+
return result.rows.map(row => ({
|
|
115
|
+
slot: row.slot,
|
|
116
|
+
dutyType: row.duty_type,
|
|
117
|
+
validatorAddress: row.validator_address,
|
|
118
|
+
nodeId: row.node_id,
|
|
119
|
+
startedAt: row.started_at,
|
|
120
|
+
completedAt: row.completed_at,
|
|
121
|
+
}));
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
/**
|
|
125
|
+
* Convert private keys to Ethereum addresses
|
|
126
|
+
*/
|
|
127
|
+
export function getAddressesFromPrivateKeys(privateKeys: `0x${string}`[]): string[] {
|
|
128
|
+
return privateKeys.map(pk => {
|
|
129
|
+
const account = privateKeyToAccount(pk);
|
|
130
|
+
return account.address;
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
/**
|
|
135
|
+
* Create initial validators from private keys for L1 contract deployment
|
|
136
|
+
*/
|
|
137
|
+
export function createInitialValidatorsFromPrivateKeys(attesterPrivateKeys: `0x${string}`[]): Array<{
|
|
138
|
+
attester: EthAddress;
|
|
139
|
+
withdrawer: EthAddress;
|
|
140
|
+
privateKey: `0x${string}`;
|
|
141
|
+
bn254SecretKey: SecretValue<bigint>;
|
|
142
|
+
}> {
|
|
143
|
+
return attesterPrivateKeys.map(pk => {
|
|
144
|
+
const account = privateKeyToAccount(pk);
|
|
145
|
+
return {
|
|
146
|
+
attester: EthAddress.fromString(account.address),
|
|
147
|
+
withdrawer: EthAddress.fromString(account.address),
|
|
148
|
+
privateKey: pk,
|
|
149
|
+
bn254SecretKey: new SecretValue(Fr.random().toBigInt()),
|
|
150
|
+
};
|
|
151
|
+
});
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
/**
|
|
155
|
+
* Verify no duplicate attestations per validator (HA coordination check)
|
|
156
|
+
* Groups duties by validator address and verifies each validator attested exactly once
|
|
157
|
+
*/
|
|
158
|
+
export function verifyNoDuplicateAttestations(
|
|
159
|
+
attestationDuties: Array<{
|
|
160
|
+
validatorAddress: string;
|
|
161
|
+
nodeId: string;
|
|
162
|
+
completedAt: Date | undefined;
|
|
163
|
+
}>,
|
|
164
|
+
logger?: Logger,
|
|
165
|
+
): Map<string, typeof attestationDuties> {
|
|
166
|
+
const dutiesByValidator = new Map<string, typeof attestationDuties>();
|
|
167
|
+
for (const duty of attestationDuties) {
|
|
168
|
+
const existing = dutiesByValidator.get(duty.validatorAddress) || [];
|
|
169
|
+
existing.push(duty);
|
|
170
|
+
dutiesByValidator.set(duty.validatorAddress, existing);
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
for (const [validatorAddress, validatorDuties] of dutiesByValidator.entries()) {
|
|
174
|
+
if (validatorDuties.length !== 1) {
|
|
175
|
+
throw new Error(`Validator ${validatorAddress} attested ${validatorDuties.length} times (expected exactly once)`);
|
|
176
|
+
}
|
|
177
|
+
if (!validatorDuties[0].completedAt) {
|
|
178
|
+
throw new Error(`Validator ${validatorAddress} attestation duty not completed`);
|
|
179
|
+
}
|
|
180
|
+
logger?.info(`Validator ${validatorAddress} attested once via node ${validatorDuties[0].nodeId}`);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
return dutiesByValidator;
|
|
184
|
+
}
|
package/src/fixtures/index.ts
CHANGED
package/src/fixtures/setup.ts
CHANGED
|
@@ -41,7 +41,7 @@ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
|
|
|
41
41
|
import { SecretValue } from '@aztec/foundation/config';
|
|
42
42
|
import { randomBytes } from '@aztec/foundation/crypto/random';
|
|
43
43
|
import { tryRmDir } from '@aztec/foundation/fs';
|
|
44
|
-
import {
|
|
44
|
+
import { withLoggerBindings } from '@aztec/foundation/log/server';
|
|
45
45
|
import { retryUntil } from '@aztec/foundation/retry';
|
|
46
46
|
import { sleep } from '@aztec/foundation/sleep';
|
|
47
47
|
import { DateProvider, TestDateProvider } from '@aztec/foundation/timer';
|
|
@@ -125,14 +125,14 @@ export async function setupSharedBlobStorage(config: { dataDirectory?: string }
|
|
|
125
125
|
* @param aztecNode - An instance of Aztec Node.
|
|
126
126
|
* @param opts - Partial configuration for the PXE.
|
|
127
127
|
* @param logger - The logger to be used.
|
|
128
|
-
* @param
|
|
128
|
+
* @param actor - Actor label to include in log output (e.g., 'pxe-test').
|
|
129
129
|
* @returns A test wallet, logger and teardown function.
|
|
130
130
|
*/
|
|
131
131
|
export async function setupPXEAndGetWallet(
|
|
132
132
|
aztecNode: AztecNode,
|
|
133
133
|
opts: Partial<PXEConfig> = {},
|
|
134
134
|
logger = getLogger(),
|
|
135
|
-
|
|
135
|
+
actor?: string,
|
|
136
136
|
): Promise<{
|
|
137
137
|
wallet: TestWallet;
|
|
138
138
|
logger: Logger;
|
|
@@ -150,9 +150,7 @@ export async function setupPXEAndGetWallet(
|
|
|
150
150
|
|
|
151
151
|
const teardown = configuredDataDirectory ? () => Promise.resolve() : () => tryRmDir(PXEConfig.dataDirectory!);
|
|
152
152
|
|
|
153
|
-
const wallet = await TestWallet.create(aztecNode, PXEConfig, {
|
|
154
|
-
useLogSuffix,
|
|
155
|
-
});
|
|
153
|
+
const wallet = await TestWallet.create(aztecNode, PXEConfig, { loggerActorLabel: actor });
|
|
156
154
|
|
|
157
155
|
return {
|
|
158
156
|
wallet,
|
|
@@ -392,7 +390,7 @@ export async function setup(
|
|
|
392
390
|
const res = await startAnvil({
|
|
393
391
|
l1BlockTime: opts.ethereumSlotDuration,
|
|
394
392
|
accounts: opts.anvilAccounts,
|
|
395
|
-
port: opts.anvilPort,
|
|
393
|
+
port: opts.anvilPort ?? (process.env.ANVIL_PORT ? parseInt(process.env.ANVIL_PORT) : undefined),
|
|
396
394
|
});
|
|
397
395
|
anvil = res.anvil;
|
|
398
396
|
config.l1RpcUrls = [res.rpcUrl];
|
|
@@ -574,10 +572,12 @@ export async function setup(
|
|
|
574
572
|
}
|
|
575
573
|
}
|
|
576
574
|
|
|
577
|
-
const aztecNodeService = await
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
575
|
+
const aztecNodeService = await withLoggerBindings({ actor: 'node-0' }, () =>
|
|
576
|
+
AztecNodeService.createAndSync(
|
|
577
|
+
config,
|
|
578
|
+
{ dateProvider, telemetry: telemetryClient, p2pClientDeps },
|
|
579
|
+
{ prefilledPublicData },
|
|
580
|
+
),
|
|
581
581
|
);
|
|
582
582
|
const sequencerClient = aztecNodeService.getSequencer();
|
|
583
583
|
|
|
@@ -611,7 +611,7 @@ export async function setup(
|
|
|
611
611
|
pxeConfig.dataDirectory = path.join(directoryToCleanup, randomBytes(8).toString('hex'));
|
|
612
612
|
// For tests we only want proving enabled if specifically requested
|
|
613
613
|
pxeConfig.proverEnabled = !!pxeOpts.proverEnabled;
|
|
614
|
-
const wallet = await TestWallet.create(aztecNodeService, pxeConfig);
|
|
614
|
+
const wallet = await TestWallet.create(aztecNodeService, pxeConfig, { loggerActorLabel: 'pxe-0' });
|
|
615
615
|
|
|
616
616
|
if (opts.walletMinFeePadding !== undefined) {
|
|
617
617
|
wallet.setMinFeePadding(opts.walletMinFeePadding);
|
|
@@ -797,7 +797,7 @@ export function createAndSyncProverNode(
|
|
|
797
797
|
prefilledPublicData: PublicDataTreeLeaf[] = [],
|
|
798
798
|
proverNodeDeps: ProverNodeDeps = {},
|
|
799
799
|
) {
|
|
800
|
-
return
|
|
800
|
+
return withLoggerBindings({ actor: 'prover-0' }, async () => {
|
|
801
801
|
const aztecNodeTxProvider = aztecNode && {
|
|
802
802
|
getTxByHash: aztecNode.getTxByHash.bind(aztecNode),
|
|
803
803
|
getTxsByHash: aztecNode.getTxsByHash.bind(aztecNode),
|
|
@@ -4,14 +4,13 @@
|
|
|
4
4
|
import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node';
|
|
5
5
|
import { range } from '@aztec/foundation/array';
|
|
6
6
|
import { SecretValue } from '@aztec/foundation/config';
|
|
7
|
-
import {
|
|
7
|
+
import { withLoggerBindings } from '@aztec/foundation/log/server';
|
|
8
8
|
import { bufferToHex } from '@aztec/foundation/string';
|
|
9
9
|
import type { DateProvider } from '@aztec/foundation/timer';
|
|
10
10
|
import type { ProverNodeConfig, ProverNodeDeps } from '@aztec/prover-node';
|
|
11
11
|
import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
|
|
12
12
|
|
|
13
13
|
import getPort from 'get-port';
|
|
14
|
-
import { AsyncLocalStorage } from 'node:async_hooks';
|
|
15
14
|
|
|
16
15
|
import { TEST_PEER_CHECK_INTERVAL_MS } from './fixtures.js';
|
|
17
16
|
import { createAndSyncProverNode, getPrivateKeyFromIndex } from './utils.js';
|
|
@@ -22,6 +21,11 @@ import { getEndToEndTestTelemetryClient } from './with_telemetry_utils.js';
|
|
|
22
21
|
// to avoid running validators with the same key
|
|
23
22
|
export const ATTESTER_PRIVATE_KEYS_START_INDEX = 3;
|
|
24
23
|
|
|
24
|
+
// Global counters for actor naming (start at 1)
|
|
25
|
+
let validatorCounter = 1;
|
|
26
|
+
let nodeCounter = 1;
|
|
27
|
+
let proverCounter = 1;
|
|
28
|
+
|
|
25
29
|
export function generatePrivateKeys(startIndex: number, numberOfKeys: number): `0x${string}`[] {
|
|
26
30
|
const privateKeys: `0x${string}`[] = [];
|
|
27
31
|
// Do not start from 0 as it is used during setup
|
|
@@ -44,10 +48,6 @@ export async function createNodes(
|
|
|
44
48
|
validatorsPerNode = 1,
|
|
45
49
|
): Promise<AztecNodeService[]> {
|
|
46
50
|
const nodePromises: Promise<AztecNodeService>[] = [];
|
|
47
|
-
const loggerIdStorage = new AsyncLocalStorage<string>();
|
|
48
|
-
const logNameHandler = (module: string) =>
|
|
49
|
-
loggerIdStorage.getStore() ? `${module}:${loggerIdStorage.getStore()}` : module;
|
|
50
|
-
addLogNameHandler(logNameHandler);
|
|
51
51
|
|
|
52
52
|
for (let i = 0; i < numNodes; i++) {
|
|
53
53
|
const index = indexOffset + i;
|
|
@@ -69,7 +69,6 @@ export async function createNodes(
|
|
|
69
69
|
prefilledPublicData,
|
|
70
70
|
dataDir,
|
|
71
71
|
metricsPort,
|
|
72
|
-
loggerIdStorage,
|
|
73
72
|
);
|
|
74
73
|
nodePromises.push(nodePromise);
|
|
75
74
|
}
|
|
@@ -81,13 +80,20 @@ export async function createNodes(
|
|
|
81
80
|
throw new Error('Sequencer not found');
|
|
82
81
|
}
|
|
83
82
|
|
|
84
|
-
removeLogNameHandler(logNameHandler);
|
|
85
83
|
return nodes;
|
|
86
84
|
}
|
|
87
85
|
|
|
88
|
-
/**
|
|
86
|
+
/** Extended config type for createNode with test-specific overrides. */
|
|
87
|
+
export type CreateNodeConfig = AztecNodeConfig & {
|
|
88
|
+
/** Whether to skip starting the sequencer. */
|
|
89
|
+
dontStartSequencer?: boolean;
|
|
90
|
+
/** Override the private key (instead of deriving from addressIndex). */
|
|
91
|
+
validatorPrivateKey?: `0x${string}`;
|
|
92
|
+
};
|
|
93
|
+
|
|
94
|
+
/** Creates a P2P enabled instance of Aztec Node Service with a validator. */
|
|
89
95
|
export async function createNode(
|
|
90
|
-
config:
|
|
96
|
+
config: CreateNodeConfig,
|
|
91
97
|
dateProvider: DateProvider,
|
|
92
98
|
tcpPort: number,
|
|
93
99
|
bootstrapNode: string | undefined,
|
|
@@ -95,9 +101,9 @@ export async function createNode(
|
|
|
95
101
|
prefilledPublicData?: PublicDataTreeLeaf[],
|
|
96
102
|
dataDirectory?: string,
|
|
97
103
|
metricsPort?: number,
|
|
98
|
-
loggerIdStorage?: AsyncLocalStorage<string>,
|
|
99
104
|
) {
|
|
100
|
-
const
|
|
105
|
+
const actorIndex = validatorCounter++;
|
|
106
|
+
return await withLoggerBindings({ actor: `validator-${actorIndex}` }, async () => {
|
|
101
107
|
const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, addressIndex, dataDirectory);
|
|
102
108
|
const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
|
|
103
109
|
return await AztecNodeService.createAndSync(
|
|
@@ -105,8 +111,7 @@ export async function createNode(
|
|
|
105
111
|
{ telemetry, dateProvider },
|
|
106
112
|
{ prefilledPublicData, dontStartSequencer: config.dontStartSequencer },
|
|
107
113
|
);
|
|
108
|
-
};
|
|
109
|
-
return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createNode) : createNode();
|
|
114
|
+
});
|
|
110
115
|
}
|
|
111
116
|
|
|
112
117
|
/** Creates a P2P enabled instance of Aztec Node Service without a validator */
|
|
@@ -118,9 +123,9 @@ export async function createNonValidatorNode(
|
|
|
118
123
|
prefilledPublicData?: PublicDataTreeLeaf[],
|
|
119
124
|
dataDirectory?: string,
|
|
120
125
|
metricsPort?: number,
|
|
121
|
-
loggerIdStorage?: AsyncLocalStorage<string>,
|
|
122
126
|
) {
|
|
123
|
-
const
|
|
127
|
+
const actorIndex = nodeCounter++;
|
|
128
|
+
return await withLoggerBindings({ actor: `node-${actorIndex}` }, async () => {
|
|
124
129
|
const p2pConfig = await createP2PConfig(baseConfig, bootstrapNode, tcpPort, dataDirectory);
|
|
125
130
|
const config: AztecNodeConfig = {
|
|
126
131
|
...p2pConfig,
|
|
@@ -130,8 +135,7 @@ export async function createNonValidatorNode(
|
|
|
130
135
|
};
|
|
131
136
|
const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
|
|
132
137
|
return await AztecNodeService.createAndSync(config, { telemetry, dateProvider }, { prefilledPublicData });
|
|
133
|
-
};
|
|
134
|
-
return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createNode) : createNode();
|
|
138
|
+
});
|
|
135
139
|
}
|
|
136
140
|
|
|
137
141
|
export async function createProverNode(
|
|
@@ -143,9 +147,9 @@ export async function createProverNode(
|
|
|
143
147
|
prefilledPublicData?: PublicDataTreeLeaf[],
|
|
144
148
|
dataDirectory?: string,
|
|
145
149
|
metricsPort?: number,
|
|
146
|
-
loggerIdStorage?: AsyncLocalStorage<string>,
|
|
147
150
|
) {
|
|
148
|
-
const
|
|
151
|
+
const actorIndex = proverCounter++;
|
|
152
|
+
return await withLoggerBindings({ actor: `prover-${actorIndex}` }, async () => {
|
|
149
153
|
const proverNodePrivateKey = getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + addressIndex)!;
|
|
150
154
|
const telemetry = await getEndToEndTestTelemetryClient(metricsPort);
|
|
151
155
|
|
|
@@ -165,8 +169,7 @@ export async function createProverNode(
|
|
|
165
169
|
prefilledPublicData,
|
|
166
170
|
{ ...proverNodeDeps, telemetry },
|
|
167
171
|
);
|
|
168
|
-
};
|
|
169
|
-
return loggerIdStorage ? await loggerIdStorage.run(tcpPort.toString(), createProverNode) : createProverNode();
|
|
172
|
+
});
|
|
170
173
|
}
|
|
171
174
|
|
|
172
175
|
export async function createP2PConfig(
|
|
@@ -192,20 +195,21 @@ export async function createP2PConfig(
|
|
|
192
195
|
}
|
|
193
196
|
|
|
194
197
|
export async function createValidatorConfig(
|
|
195
|
-
config:
|
|
198
|
+
config: CreateNodeConfig,
|
|
196
199
|
bootstrapNodeEnr?: string,
|
|
197
200
|
port?: number,
|
|
198
201
|
addressIndex: number | number[] = 1,
|
|
199
202
|
dataDirectory?: string,
|
|
200
203
|
) {
|
|
201
204
|
const addressIndices = Array.isArray(addressIndex) ? addressIndex : [addressIndex];
|
|
202
|
-
if (addressIndices.length === 0) {
|
|
205
|
+
if (addressIndices.length === 0 && !config.validatorPrivateKey) {
|
|
203
206
|
throw new Error('At least one address index must be provided to create a validator config');
|
|
204
207
|
}
|
|
205
208
|
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
+
// Use override private key if provided, otherwise derive from address indices
|
|
210
|
+
const attesterPrivateKeys = config.validatorPrivateKey
|
|
211
|
+
? [config.validatorPrivateKey]
|
|
212
|
+
: addressIndices.map(index => bufferToHex(getPrivateKeyFromIndex(ATTESTER_PRIVATE_KEYS_START_INDEX + index)!));
|
|
209
213
|
const p2pConfig = await createP2PConfig(config, bootstrapNodeEnr, port, dataDirectory);
|
|
210
214
|
const nodeConfig: AztecNodeConfig = {
|
|
211
215
|
...config,
|
|
@@ -11,7 +11,7 @@ import type { DeployAztecL1ContractsReturnType } from '@aztec/ethereum/deploy-az
|
|
|
11
11
|
import { deployL1Contract } from '@aztec/ethereum/deploy-l1-contract';
|
|
12
12
|
import type { ExtendedViemWalletClient } from '@aztec/ethereum/types';
|
|
13
13
|
import { extractEvent } from '@aztec/ethereum/utils';
|
|
14
|
-
import {
|
|
14
|
+
import { EpochNumber } from '@aztec/foundation/branded-types';
|
|
15
15
|
import { sha256ToField } from '@aztec/foundation/crypto/sha256';
|
|
16
16
|
import { InboxAbi, UniswapPortalAbi, UniswapPortalBytecode } from '@aztec/l1-artifacts';
|
|
17
17
|
import { UniswapContract } from '@aztec/noir-contracts.js/Uniswap';
|
|
@@ -250,8 +250,8 @@ export const uniswapL1L2TestSuite = (
|
|
|
250
250
|
await wethCrossChainHarness.expectPublicBalanceOnL2(uniswapL2Contract.address, 0n);
|
|
251
251
|
|
|
252
252
|
// Since the outbox is only consumable when the epoch is proven, we need to advance to the next epoch.
|
|
253
|
-
const
|
|
254
|
-
const epoch = await rollup.getEpochNumberForCheckpoint(checkpointNumber);
|
|
253
|
+
const block = await aztecNode.getBlock(l2UniswapInteractionReceipt.blockNumber!);
|
|
254
|
+
const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
|
|
255
255
|
await cheatCodes.rollup.advanceToEpoch(EpochNumber(epoch + 1));
|
|
256
256
|
await waitForProven(aztecNode, l2UniswapInteractionReceipt, { provenTimeout: 300 });
|
|
257
257
|
|
|
@@ -838,9 +838,8 @@ export const uniswapL1L2TestSuite = (
|
|
|
838
838
|
chainId: new Fr(l1Client.chain.id),
|
|
839
839
|
});
|
|
840
840
|
|
|
841
|
-
const
|
|
842
|
-
|
|
843
|
-
);
|
|
841
|
+
const block = await aztecNode.getBlock(withdrawReceipt.blockNumber!);
|
|
842
|
+
const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
|
|
844
843
|
const swapResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, swapPrivateLeaf);
|
|
845
844
|
const withdrawResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, withdrawLeaf);
|
|
846
845
|
|
|
@@ -972,9 +971,8 @@ export const uniswapL1L2TestSuite = (
|
|
|
972
971
|
chainId: new Fr(l1Client.chain.id),
|
|
973
972
|
});
|
|
974
973
|
|
|
975
|
-
const
|
|
976
|
-
|
|
977
|
-
);
|
|
974
|
+
const block = await aztecNode.getBlock(withdrawReceipt.blockNumber!);
|
|
975
|
+
const epoch = await rollup.getEpochNumberForCheckpoint(block!.checkpointNumber);
|
|
978
976
|
const swapResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, swapPublicLeaf);
|
|
979
977
|
const withdrawResult = await computeL2ToL1MembershipWitness(aztecNode, epoch, withdrawLeaf);
|
|
980
978
|
|
|
@@ -6,6 +6,132 @@ import { Tx, type TxReceipt } from '@aztec/stdlib/tx';
|
|
|
6
6
|
|
|
7
7
|
import { createHistogram } from 'perf_hooks';
|
|
8
8
|
|
|
9
|
+
/** Metrics class for proving-related benchmarks. */
|
|
10
|
+
export class ProvingMetrics {
|
|
11
|
+
private successfulTxs: number | undefined;
|
|
12
|
+
private proofDuration: number | undefined;
|
|
13
|
+
private activeAgents: number | undefined;
|
|
14
|
+
private avgQueueTime: number | undefined;
|
|
15
|
+
private jobRetries: number | undefined;
|
|
16
|
+
private jobDuration: number | undefined;
|
|
17
|
+
private timedOutJobs: number | undefined;
|
|
18
|
+
private resolvedJobs: number | undefined;
|
|
19
|
+
private rejectedJobs: number | undefined;
|
|
20
|
+
private epochProvingDuration: number | undefined;
|
|
21
|
+
private provenTransactions: number | undefined;
|
|
22
|
+
private provenBlocks: number | undefined;
|
|
23
|
+
|
|
24
|
+
constructor(private prefix: string) {}
|
|
25
|
+
|
|
26
|
+
recordSuccessfulTxs(count: number): void {
|
|
27
|
+
this.successfulTxs = count;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
recordProofDuration(seconds: number): void {
|
|
31
|
+
this.proofDuration = seconds;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
recordActiveAgents(count: number): void {
|
|
35
|
+
this.activeAgents = count;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
recordAvgQueueTime(ms: number): void {
|
|
39
|
+
this.avgQueueTime = ms;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
recordJobRetries(count: number): void {
|
|
43
|
+
this.jobRetries = count;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
recordJobDuration(ms: number): void {
|
|
47
|
+
this.jobDuration = ms;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
recordTimedOutJobs(count: number): void {
|
|
51
|
+
this.timedOutJobs = count;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
recordResolvedJobs(count: number): void {
|
|
55
|
+
this.resolvedJobs = count;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
recordRejectedJobs(count: number): void {
|
|
59
|
+
this.rejectedJobs = count;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
recordEpochProvingDuration(seconds: number): void {
|
|
63
|
+
this.epochProvingDuration = seconds;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
recordProvenTransactions(count: number): void {
|
|
67
|
+
this.provenTransactions = count;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
recordProvenBlocks(count: number): void {
|
|
71
|
+
this.provenBlocks = count;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
toGithubActionBenchmarkJSON(): Array<{ name: string; unit: string; value: number }> {
|
|
75
|
+
const data: Array<{ name: string; unit: string; value: number }> = [];
|
|
76
|
+
|
|
77
|
+
if (this.successfulTxs !== undefined) {
|
|
78
|
+
data.push({ name: `${this.prefix}/successful_txs`, unit: 'count', value: this.successfulTxs });
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
if (this.proofDuration !== undefined) {
|
|
82
|
+
data.push({ name: `${this.prefix}/proof_duration`, unit: 's', value: this.proofDuration });
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
if (this.activeAgents !== undefined) {
|
|
86
|
+
data.push({ name: `${this.prefix}/active_agents`, unit: 'count', value: this.activeAgents });
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
if (this.avgQueueTime !== undefined) {
|
|
90
|
+
data.push({ name: `${this.prefix}/avg_queue_time`, unit: 'ms', value: this.avgQueueTime });
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
if (this.jobRetries !== undefined) {
|
|
94
|
+
data.push({ name: `${this.prefix}/job_retries`, unit: 'count', value: this.jobRetries });
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
if (this.jobDuration !== undefined) {
|
|
98
|
+
data.push({ name: `${this.prefix}/job_duration`, unit: 'ms', value: this.jobDuration });
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
if (this.timedOutJobs !== undefined) {
|
|
102
|
+
data.push({ name: `${this.prefix}/timed_out_jobs`, unit: 'count', value: this.timedOutJobs });
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
if (this.resolvedJobs !== undefined) {
|
|
106
|
+
data.push({ name: `${this.prefix}/resolved_jobs`, unit: 'count', value: this.resolvedJobs });
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
if (this.rejectedJobs !== undefined) {
|
|
110
|
+
data.push({ name: `${this.prefix}/rejected_jobs`, unit: 'count', value: this.rejectedJobs });
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
if (this.epochProvingDuration !== undefined) {
|
|
114
|
+
data.push({ name: `${this.prefix}/epoch_proving_duration`, unit: 's', value: this.epochProvingDuration });
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
if (this.provenTransactions !== undefined) {
|
|
118
|
+
data.push({ name: `${this.prefix}/proven_transactions`, unit: 'count', value: this.provenTransactions });
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
if (this.provenBlocks !== undefined) {
|
|
122
|
+
data.push({ name: `${this.prefix}/proven_blocks`, unit: 'count', value: this.provenBlocks });
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
const scenario = process.env.BENCH_SCENARIO?.trim();
|
|
126
|
+
if (!scenario) {
|
|
127
|
+
return data;
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
const scenarioPrefix = `scenario/${scenario}/`;
|
|
131
|
+
return data.map(entry => ({ ...entry, name: `${scenarioPrefix}${entry.name}` }));
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
9
135
|
export type TxInclusionData = {
|
|
10
136
|
txHash: string;
|
|
11
137
|
sentAt: number;
|
|
@@ -15,6 +15,7 @@ const testConfigSchema = z.object({
|
|
|
15
15
|
AZTEC_EPOCH_DURATION: z.coerce.number().optional().default(32),
|
|
16
16
|
AZTEC_PROOF_SUBMISSION_WINDOW: z.coerce.number().optional().default(5),
|
|
17
17
|
AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET: z.coerce.number().optional().default(2),
|
|
18
|
+
FUNDING_PRIVATE_KEY: z.string().optional(),
|
|
18
19
|
});
|
|
19
20
|
|
|
20
21
|
export type TestConfig = z.infer<typeof testConfigSchema>;
|
|
@@ -5,7 +5,7 @@
|
|
|
5
5
|
export { type TestConfig, setupEnvironment } from './config.js';
|
|
6
6
|
|
|
7
7
|
// Scripts
|
|
8
|
-
export { getGitProjectRoot, getAztecBin, runAztecBin, runProjectScript } from './scripts.js';
|
|
8
|
+
export { type ScriptResult, getGitProjectRoot, getAztecBin, runAztecBin, runProjectScript } from './scripts.js';
|
|
9
9
|
|
|
10
10
|
// K8s operations
|
|
11
11
|
export {
|
|
@@ -24,6 +24,7 @@ export {
|
|
|
24
24
|
getServiceEndpoint,
|
|
25
25
|
getRPCEndpoint,
|
|
26
26
|
getEthereumEndpoint,
|
|
27
|
+
createResilientPrometheusConnection,
|
|
27
28
|
} from './k8s.js';
|
|
28
29
|
|
|
29
30
|
// Chaos Mesh
|
|
@@ -45,6 +46,7 @@ export { restartBot, installTransferBot, uninstallTransferBot } from './bot.js';
|
|
|
45
46
|
// Node operations (sequencers, validators, pods)
|
|
46
47
|
export {
|
|
47
48
|
awaitCheckpointNumber,
|
|
49
|
+
waitForProvenToAdvance,
|
|
48
50
|
getSequencers,
|
|
49
51
|
updateSequencersConfig,
|
|
50
52
|
getSequencersConfig,
|