@aztec/end-to-end 4.0.0-nightly.20260112 → 4.0.0-nightly.20260114
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/bench/client_flows/client_flows_benchmark.d.ts +10 -10
- package/dest/bench/client_flows/client_flows_benchmark.d.ts.map +1 -1
- package/dest/bench/client_flows/client_flows_benchmark.js +98 -133
- package/dest/e2e_blacklist_token_contract/blacklist_token_contract_test.d.ts +5 -6
- package/dest/e2e_blacklist_token_contract/blacklist_token_contract_test.d.ts.map +1 -1
- package/dest/e2e_blacklist_token_contract/blacklist_token_contract_test.js +94 -112
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts +5 -5
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts.map +1 -1
- package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.js +60 -62
- package/dest/e2e_deploy_contract/deploy_test.d.ts +4 -3
- package/dest/e2e_deploy_contract/deploy_test.d.ts.map +1 -1
- package/dest/e2e_deploy_contract/deploy_test.js +15 -13
- package/dest/e2e_epochs/epochs_test.js +1 -1
- package/dest/e2e_fees/bridging_race.notest.js +2 -4
- package/dest/e2e_fees/fees_test.d.ts +11 -11
- package/dest/e2e_fees/fees_test.d.ts.map +1 -1
- package/dest/e2e_fees/fees_test.js +118 -139
- package/dest/e2e_l1_publisher/write_json.d.ts +1 -1
- package/dest/e2e_l1_publisher/write_json.d.ts.map +1 -1
- package/dest/e2e_l1_publisher/write_json.js +1 -0
- package/dest/e2e_nested_contract/nested_contract_test.d.ts +5 -8
- package/dest/e2e_nested_contract/nested_contract_test.d.ts.map +1 -1
- package/dest/e2e_nested_contract/nested_contract_test.js +29 -40
- package/dest/e2e_p2p/inactivity_slash_test.js +1 -1
- package/dest/e2e_p2p/p2p_network.d.ts +5 -3
- package/dest/e2e_p2p/p2p_network.d.ts.map +1 -1
- package/dest/e2e_p2p/p2p_network.js +99 -102
- package/dest/e2e_token_contract/token_contract_test.d.ts +15 -8
- package/dest/e2e_token_contract/token_contract_test.d.ts.map +1 -1
- package/dest/e2e_token_contract/token_contract_test.js +88 -92
- package/dest/fixtures/e2e_prover_test.d.ts +7 -13
- package/dest/fixtures/e2e_prover_test.d.ts.map +1 -1
- package/dest/fixtures/e2e_prover_test.js +78 -91
- package/dest/fixtures/snapshot_manager.d.ts +10 -44
- package/dest/fixtures/snapshot_manager.d.ts.map +1 -1
- package/dest/fixtures/snapshot_manager.js +12 -255
- package/dest/fixtures/utils.js +3 -3
- package/dest/quality_of_service/grafana_client.d.ts +41 -0
- package/dest/quality_of_service/grafana_client.d.ts.map +1 -0
- package/dest/quality_of_service/{alert_checker.js → grafana_client.js} +1 -1
- package/dest/quality_of_service/prometheus_client.d.ts +38 -0
- package/dest/quality_of_service/prometheus_client.d.ts.map +1 -0
- package/dest/quality_of_service/prometheus_client.js +67 -0
- package/dest/spartan/setup_test_wallets.d.ts +4 -3
- package/dest/spartan/setup_test_wallets.d.ts.map +1 -1
- package/dest/spartan/tx_metrics.d.ts +4 -1
- package/dest/spartan/tx_metrics.d.ts.map +1 -1
- package/dest/spartan/tx_metrics.js +21 -1
- package/dest/spartan/utils.d.ts +5 -1
- package/dest/spartan/utils.d.ts.map +1 -1
- package/dest/spartan/utils.js +25 -8
- package/package.json +38 -38
- package/src/bench/client_flows/client_flows_benchmark.ts +134 -192
- package/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts +104 -136
- package/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +91 -103
- package/src/e2e_deploy_contract/deploy_test.ts +18 -14
- package/src/e2e_epochs/epochs_test.ts +1 -1
- package/src/e2e_fees/bridging_race.notest.ts +2 -5
- package/src/e2e_fees/fees_test.ts +165 -210
- package/src/e2e_l1_publisher/write_json.ts +1 -0
- package/src/e2e_nested_contract/nested_contract_test.ts +30 -54
- package/src/e2e_p2p/inactivity_slash_test.ts +1 -1
- package/src/e2e_p2p/p2p_network.ts +149 -165
- package/src/e2e_token_contract/token_contract_test.ts +104 -113
- package/src/fixtures/e2e_prover_test.ts +97 -130
- package/src/fixtures/snapshot_manager.ts +12 -325
- package/src/fixtures/utils.ts +3 -3
- package/src/quality_of_service/{alert_checker.ts → grafana_client.ts} +1 -1
- package/src/quality_of_service/prometheus_client.ts +113 -0
- package/src/spartan/setup_test_wallets.ts +7 -1
- package/src/spartan/tx_metrics.ts +21 -1
- package/src/spartan/utils.ts +26 -8
- package/dest/quality_of_service/alert_checker.d.ts +0 -41
- package/dest/quality_of_service/alert_checker.d.ts.map +0 -1
package/src/fixtures/snapshot_manager.ts
CHANGED

@@ -2,7 +2,6 @@ import { SchnorrAccountContractArtifact } from '@aztec/accounts/schnorr';
 import { type InitialAccountData, generateSchnorrAccounts } from '@aztec/accounts/testing';
 import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec/aztec-node';
 import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses';
-import { getContractClassFromArtifact } from '@aztec/aztec.js/contracts';
 import { BatchCall, type ContractFunctionInteraction, waitForProven } from '@aztec/aztec.js/contracts';
 import { publishContractClass, publishInstance } from '@aztec/aztec.js/deployment';
 import type { Logger } from '@aztec/aztec.js/log';
@@ -18,12 +17,9 @@ import {
   deployAztecL1Contracts,
 } from '@aztec/ethereum/deploy-aztec-l1-contracts';
 import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test';
-import { asyncMap } from '@aztec/foundation/async-map';
 import { SecretValue } from '@aztec/foundation/config';
 import { randomBytes } from '@aztec/foundation/crypto/random';
 import { tryRmDir } from '@aztec/foundation/fs';
-import { createLogger } from '@aztec/foundation/log';
-import { resolver, reviver } from '@aztec/foundation/serialize';
 import { TestDateProvider } from '@aztec/foundation/timer';
 import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { protocolContractsHash } from '@aztec/protocol-contracts';
@@ -31,17 +27,13 @@ import type { ProverNode } from '@aztec/prover-node';
 import { getPXEConfig } from '@aztec/pxe/server';
 import type { SequencerClient } from '@aztec/sequencer-client';
 import { tryStop } from '@aztec/stdlib/interfaces/server';
-import { getConfigEnvVars as getTelemetryConfig, initTelemetryClient } from '@aztec/telemetry-client';
 import { TestWallet } from '@aztec/test-wallet/server';
 import { getGenesisValues } from '@aztec/world-state/testing';
 
 import type { Anvil } from '@viem/anvil';
-import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
-import { copySync, removeSync } from 'fs-extra/esm';
 import fs from 'fs/promises';
 import { tmpdir } from 'os';
-import path
-import type { Hex } from 'viem';
+import path from 'path';
 import { mnemonicToAccount } from 'viem/accounts';
 import { foundry } from 'viem/chains';
 
@@ -75,192 +67,10 @@ export type SubsystemsContext = {
   directoryToCleanup?: string;
 };
 
-type SnapshotEntry = {
-  name: string;
-  apply: (context: SubsystemsContext) => Promise<any>;
-  restore: (snapshotData: any, context: SubsystemsContext) => Promise<any>;
-  snapshotPath: string;
-};
-
-export function createSnapshotManager(
-  testName: string,
-  dataPath?: string,
-  config: Partial<SetupOptions> = {},
-  deployL1ContractsArgs: Partial<DeployAztecL1ContractsArgs> = {
-    initialValidators: [],
-  },
-) {
-  return dataPath
-    ? new SnapshotManager(testName, dataPath, config, deployL1ContractsArgs)
-    : new MockSnapshotManager(testName, config, deployL1ContractsArgs);
-}
-
-export interface ISnapshotManager {
-  snapshot<T>(
-    name: string,
-    apply: (context: SubsystemsContext) => Promise<T>,
-    restore?: (snapshotData: T, context: SubsystemsContext) => Promise<void>,
-  ): Promise<void>;
-
-  setup(): Promise<SubsystemsContext>;
-
-  teardown(): Promise<void>;
-}
-
-/** Snapshot manager that does not perform snapshotting, it just applies transition and restoration functions as it receives them. */
-class MockSnapshotManager implements ISnapshotManager {
-  private context?: SubsystemsContext;
-  private logger: Logger;
-
-  constructor(
-    testName: string,
-    private config: Partial<AztecNodeConfig> = {},
-    private deployL1ContractsArgs: Partial<DeployAztecL1ContractsArgs> = {},
-  ) {
-    this.logger = createLogger(`e2e:snapshot_manager:${testName}`);
-    this.logger.warn(`No data path given, will not persist any snapshots.`);
-  }
-
-  public async snapshot<T>(
-    name: string,
-    apply: (context: SubsystemsContext) => Promise<T>,
-    restore: (snapshotData: T, context: SubsystemsContext) => Promise<void> = () => Promise.resolve(),
-  ) {
-    // We are running in disabled mode. Just apply the state.
-    const context = await this.setup();
-    this.logger.verbose(`Applying state transition for ${name}...`);
-    const snapshotData = await apply(context);
-    this.logger.verbose(`State transition for ${name} complete.`);
-    // Execute the restoration function.
-    await restore(snapshotData, context);
-    return;
-  }
-
-  public async setup() {
-    if (!this.context) {
-      this.context = await setupFromFresh(undefined, this.logger, this.config, this.deployL1ContractsArgs);
-    }
-    return this.context;
-  }
-
-  public async teardown() {
-    await teardown(this.context);
-    this.context = undefined;
-  }
-}
-
-/**
- * Snapshot engine for local e2e tests. Read more:
- * https://github.com/AztecProtocol/aztec-packages/pull/5526
- */
-class SnapshotManager implements ISnapshotManager {
-  private snapshotStack: SnapshotEntry[] = [];
-  private context?: SubsystemsContext;
-  private livePath: string;
-  private logger: Logger;
-
-  constructor(
-    testName: string,
-    private dataPath: string,
-    private config: Partial<SetupOptions> = {},
-    private deployL1ContractsArgs: Partial<DeployAztecL1ContractsArgs> = {},
-  ) {
-    this.livePath = join(this.dataPath, 'live', testName);
-    this.logger = createLogger(`e2e:snapshot_manager:${testName}`);
-  }
-
-  public async snapshot<T>(
-    name: string,
-    apply: (context: SubsystemsContext) => Promise<T>,
-    restore: (snapshotData: T, context: SubsystemsContext) => Promise<void> = () => Promise.resolve(),
-  ) {
-    const snapshotPath = join(this.dataPath, 'snapshots', ...this.snapshotStack.map(e => e.name), name, 'snapshot');
-
-    if (existsSync(snapshotPath)) {
-      // Snapshot exists. Record entry on stack but do nothing else as we're probably still descending the tree.
-      // It's the tests responsibility to call setup() before a test to ensure subsystems get created.
-      this.logger.verbose(`Snapshot exists at ${snapshotPath}. Continuing...`);
-      this.snapshotStack.push({ name, apply, restore, snapshotPath });
-      return;
-    }
-
-    // Snapshot didn't exist at snapshotPath, and by definition none of the child snapshots can exist.
-    // If we have no subsystem context yet, create it from the top of the snapshot stack (if it exists).
-    const context = await this.setup();
-
-    this.snapshotStack.push({ name, apply, restore, snapshotPath });
-
-    // Apply current state transition.
-    this.logger.verbose(`Applying state transition for ${name}...`);
-    const snapshotData = await apply(context);
-    this.logger.verbose(`State transition for ${name} complete.`);
-
-    // Execute the restoration function.
-    await restore(snapshotData, context);
-
-    // Save the snapshot data.
-    const ethCheatCodes = new EthCheatCodesWithState(context.aztecNodeConfig.l1RpcUrls, context.dateProvider);
-    const anvilStateFile = `${this.livePath}/anvil.dat`;
-    await ethCheatCodes.dumpChainState(anvilStateFile);
-    writeFileSync(`${this.livePath}/${name}.json`, JSON.stringify(snapshotData || {}, resolver));
-
-    // Copy everything to snapshot path.
-    // We want it to be atomic, in case multiple processes are racing to create the snapshot.
-    this.logger.verbose(`Saving snapshot to ${snapshotPath}...`);
-    if (mkdirSync(snapshotPath, { recursive: true })) {
-      copySync(this.livePath, snapshotPath);
-      this.logger.verbose(`Snapshot copied to ${snapshotPath}.`);
-    } else {
-      this.logger.verbose(`Snapshot already exists at ${snapshotPath}. Discarding our version.`);
-      await this.teardown();
-    }
-  }
-
-  /**
-   * Creates and returns the subsystem context based on the current snapshot stack.
-   * If the subsystem context already exists, just return it.
-   * If you want to be sure to get a clean snapshot, be sure to call teardown() before calling setup().
-   */
-  public async setup() {
-    // We have no subsystem context yet.
-    // If one exists on the snapshot stack, create one from that snapshot.
-    // Otherwise create a fresh one.
-    if (!this.context) {
-      removeSync(this.livePath);
-      mkdirSync(this.livePath, { recursive: true });
-      const previousSnapshotPath = this.snapshotStack[this.snapshotStack.length - 1]?.snapshotPath;
-      if (previousSnapshotPath) {
-        this.logger.verbose(`Copying snapshot from ${previousSnapshotPath} to ${this.livePath}...`);
-        copySync(previousSnapshotPath, this.livePath);
-        this.context = await setupFromState(this.livePath, this.logger);
-        // Execute each of the previous snapshots restoration functions in turn.
-        await asyncMap(this.snapshotStack, async e => {
-          const snapshotData = JSON.parse(readFileSync(`${e.snapshotPath}/${e.name}.json`, 'utf-8'), reviver);
-          this.logger.verbose(`Executing restoration function for ${e.name}...`);
-          await e.restore(snapshotData, this.context!);
-          this.logger.verbose(`Restoration of ${e.name} complete.`);
-        });
-      } else {
-        this.context = await setupFromFresh(this.livePath, this.logger, this.config, this.deployL1ContractsArgs);
-      }
-    }
-    return this.context;
-  }
-
-  /**
-   * Destroys the current subsystem context.
-   */
-  public async teardown() {
-    await teardown(this.context);
-    this.context = undefined;
-    removeSync(this.livePath);
-  }
-}
-
 /**
  * Destroys the current subsystem context.
  */
-async function teardown(context: SubsystemsContext | undefined) {
+export async function teardown(context: SubsystemsContext | undefined) {
   if (!context) {
     return;
   }
@@ -281,11 +91,9 @@ async function teardown(context: SubsystemsContext | undefined) {
 
 /**
  * Initializes a fresh set of subsystems.
- *
- * If there is no statePath, in-memory and temporary state locations will be used.
+ * State is stored in temporary in-memory locations.
 */
-async function setupFromFresh(
-  statePath: string | undefined,
+export async function setupFromFresh(
   logger: Logger,
   { numberOfInitialFundedAccounts = 10, ...opts }: SetupOptions = {},
   deployL1ContractsArgs: Partial<DeployAztecL1ContractsArgs> = {
@@ -316,11 +124,7 @@ async function setupFromFresh(
   // Create a temp directory for all ephemeral state and cleanup afterwards
   const directoryToCleanup = path.join(tmpdir(), randomBytes(8).toString('hex'));
   await fs.mkdir(directoryToCleanup, { recursive: true });
-
-    aztecNodeConfig.dataDirectory = directoryToCleanup;
-  } else {
-    aztecNodeConfig.dataDirectory = statePath;
-  }
+  aztecNodeConfig.dataDirectory = directoryToCleanup;
 
   await setupSharedBlobStorage(aztecNodeConfig);
 
@@ -434,17 +238,12 @@ async function setupFromFresh(
 
   logger.verbose('Creating pxe...');
   const pxeConfig = getPXEConfig();
-  pxeConfig.dataDirectory =
+  pxeConfig.dataDirectory = path.join(directoryToCleanup, randomBytes(8).toString('hex'));
   // Only enable proving if specifically requested.
   pxeConfig.proverEnabled = !!opts.realProofs;
   const wallet = await TestWallet.create(aztecNode, pxeConfig);
   const cheatCodes = await CheatCodes.create(aztecNodeConfig.l1RpcUrls, aztecNode, dateProvider);
 
-  if (statePath) {
-    writeFileSync(`${statePath}/aztec_node_config.json`, JSON.stringify(aztecNodeConfig, resolver));
-    writeFileSync(`${statePath}/accounts.json`, JSON.stringify(initialFundedAccounts, resolver));
-  }
-
   return {
     aztecNodeConfig,
     anvil,
@@ -464,120 +263,8 @@ async function setupFromFresh(
 }
 
 /**
- *
-
-async function setupFromState(statePath: string, logger: Logger): Promise<SubsystemsContext> {
-  logger.verbose(`Initializing with saved state at ${statePath}...`);
-
-  const directoryToCleanup = path.join(tmpdir(), randomBytes(8).toString('hex'));
-  await fs.mkdir(directoryToCleanup, { recursive: true });
-
-  // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing.
-  const aztecNodeConfig: AztecNodeConfig & SetupOptions = JSON.parse(
-    readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'),
-    reviver,
-  );
-  aztecNodeConfig.dataDirectory = statePath;
-  aztecNodeConfig.listenAddress = '127.0.0.1';
-
-  await setupSharedBlobStorage(aztecNodeConfig);
-
-  const initialFundedAccounts: InitialAccountData[] =
-    JSON.parse(readFileSync(`${statePath}/accounts.json`, 'utf-8'), reviver) || [];
-  const { prefilledPublicData } = await getGenesisValues(initialFundedAccounts.map(a => a.address));
-
-  // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies.
-  const { anvil, rpcUrl } = await startAnvil();
-  aztecNodeConfig.l1RpcUrls = [rpcUrl];
-  // Load anvil state.
-  const anvilStateFile = `${statePath}/anvil.dat`;
-
-  const dateProvider = new TestDateProvider();
-  const ethCheatCodes = new EthCheatCodesWithState(aztecNodeConfig.l1RpcUrls, dateProvider);
-  await ethCheatCodes.loadChainState(anvilStateFile);
-
-  // TODO: Encapsulate this in a NativeAcvm impl.
-  const acvmConfig = await getACVMConfig(logger);
-  if (acvmConfig) {
-    aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory;
-    aztecNodeConfig.acvmBinaryPath = acvmConfig.acvmBinaryPath;
-  }
-
-  const bbConfig = await getBBConfig(logger);
-  if (bbConfig) {
-    aztecNodeConfig.bbBinaryPath = bbConfig.bbBinaryPath;
-    aztecNodeConfig.bbWorkingDirectory = bbConfig.bbWorkingDirectory;
-  }
-
-  logger.verbose('Creating ETH clients...');
-  const l1Client = createExtendedL1Client(aztecNodeConfig.l1RpcUrls, mnemonicToAccount(MNEMONIC));
-
-  const watcher = new AnvilTestWatcher(
-    ethCheatCodes,
-    aztecNodeConfig.l1Contracts.rollupAddress,
-    l1Client,
-    dateProvider,
-  );
-  await watcher.start();
-
-  const telemetry = await initTelemetryClient(getTelemetryConfig());
-
-  logger.verbose('Creating aztec node...');
-  const aztecNode = await AztecNodeService.createAndSync(
-    aztecNodeConfig,
-    { telemetry, dateProvider },
-    { prefilledPublicData },
-  );
-
-  let proverNode: ProverNode | undefined = undefined;
-  if (aztecNodeConfig.startProverNode) {
-    logger.verbose('Creating and syncing a simulated prover node...');
-    const proverNodePrivateKey = getPrivateKeyFromIndex(2);
-    const proverNodePrivateKeyHex: Hex = `0x${proverNodePrivateKey!.toString('hex')}`;
-    proverNode = await createAndSyncProverNode(
-      proverNodePrivateKeyHex,
-      aztecNodeConfig,
-      {
-        ...aztecNodeConfig.proverNodeConfig,
-        dataDirectory: path.join(directoryToCleanup, randomBytes(8).toString('hex')),
-        p2pEnabled: false,
-      },
-      aztecNode,
-      prefilledPublicData,
-    );
-  }
-
-  logger.verbose('Creating pxe...');
-  const pxeConfig = getPXEConfig();
-  pxeConfig.dataDirectory = statePath;
-  const wallet = await TestWallet.create(aztecNode, pxeConfig);
-  const cheatCodes = await CheatCodes.create(aztecNodeConfig.l1RpcUrls, aztecNode, dateProvider);
-
-  return {
-    aztecNodeConfig,
-    anvil,
-    aztecNode,
-    wallet,
-    sequencer: aztecNode.getSequencer()!,
-    acvmConfig,
-    bbConfig,
-    proverNode,
-    deployL1ContractsValues: {
-      l1Client,
-      l1ContractAddresses: aztecNodeConfig.l1Contracts,
-      rollupVersion: aztecNodeConfig.rollupVersion,
-    },
-    watcher,
-    cheatCodes,
-    dateProvider,
-    initialFundedAccounts,
-    directoryToCleanup,
-  };
-}
-
-/**
- * Snapshot 'apply' helper function to add accounts.
- * The 'restore' function is not provided, as it must be a closure within the test context to capture the results.
+ * Helper function to deploy accounts.
+ * Returns deployed account data that can be used by tests.
  */
 export const deployAccounts =
   (numberOfAccounts: number, logger: Logger) =>
@@ -622,14 +309,14 @@ export async function publicDeployAccounts(
   node?: AztecNode,
 ) {
   const instances = (await Promise.all(accountsToDeploy.map(account => wallet.getContractMetadata(account)))).map(
-    metadata => metadata.
+    metadata => metadata.instance,
  );
 
-  const
-  const
+  const { instance } = await wallet.getContractMetadata(accountsToDeploy[0]);
+  const { isContractClassPubliclyRegistered } = await wallet.getContractClassMetadata(instance!.currentContractClassId);
 
   const calls: ContractFunctionInteraction[] = await Promise.all([
-    ...(!
+    ...(!isContractClassPubliclyRegistered ? [publishContractClass(wallet, SchnorrAccountContractArtifact)] : []),
     ...instances.map(instance => publishInstance(wallet, instance!)),
   ]);
 
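The snapshot stack (createSnapshotManager, SnapshotManager, MockSnapshotManager and setupFromState) is gone in this release; setupFromFresh and teardown are now exported and always run against ephemeral, temporary state. A minimal sketch of how a suite might drive the exported helpers directly — the relative import path, logger name and test names below are illustrative, not taken from the package:

import { createLogger } from '@aztec/foundation/log';
// Hypothetical relative import; adjust to where the fixture lives in your test tree.
import { setupFromFresh, teardown, type SubsystemsContext } from '../fixtures/snapshot_manager.js';

describe('my_e2e_suite', () => {
  const logger = createLogger('e2e:my_e2e_suite');
  let context: SubsystemsContext;

  beforeAll(async () => {
    // Every run starts from fresh, temporary state; there is no snapshot tree to descend.
    context = await setupFromFresh(logger);
  });

  afterAll(async () => {
    // Stops the node, prover node, anvil, etc. and removes the temp directories.
    await teardown(context);
  });

  it('talks to the freshly created node', async () => {
    expect(await context.aztecNode.getBlockNumber()).toBeGreaterThanOrEqual(0);
  });
});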
package/src/fixtures/utils.ts
CHANGED
@@ -693,14 +693,14 @@ export async function ensureAccountContractsPublished(wallet: Wallet, accountsTo
         .filter(({ deployed }) => !deployed)
         .map(({ address }) => wallet.getContractMetadata(address)),
     )
-  ).map(contractMetadata => contractMetadata.
+  ).map(contractMetadata => contractMetadata.instance);
   const contractClass = await getContractClassFromArtifact(SchnorrAccountContractArtifact);
-  if (!(await wallet.getContractClassMetadata(contractClass.id
+  if (!(await wallet.getContractClassMetadata(contractClass.id)).isContractClassPubliclyRegistered) {
     await (await publishContractClass(wallet, SchnorrAccountContractArtifact))
       .send({ from: accountsToDeploy[0] })
       .wait();
   }
-  const requests =
+  const requests = instances.map(instance => publishInstance(wallet, instance!));
   const batch = new BatchCall(wallet, requests);
   await batch.send({ from: accountsToDeploy[0] }).wait();
 }
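For context, a hedged usage sketch of the updated helper — `wallet` and `initialFundedAccounts` are assumed to come from the surrounding test setup:

// Publishes the Schnorr account contract class once (if it is not yet publicly
// registered) and then publishes every account instance via a single BatchCall.
const addresses = initialFundedAccounts.map(a => a.address);
await ensureAccountContractsPublished(wallet, addresses);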
package/src/quality_of_service/prometheus_client.ts
ADDED

@@ -0,0 +1,113 @@
+export type PromteheusClientOptions = {
+  server: URL;
+};
+
+export class PrometheusClient {
+  constructor(
+    private config: PromteheusClientOptions,
+    private httpClient: typeof fetch = fetch,
+  ) {}
+
+  public async querySingleValue(query: string, time = new Date()): Promise<number> {
+    const resp = await this.queryRaw(query, time);
+    if (resp.status === 'success') {
+      if (resp.data.resultType === 'vector') {
+        if (resp.data.result.length === 0) {
+          return 0;
+        }
+        const [_, value] = resp.data.result[0].value;
+        return parseFloat(value);
+      }
+    }
+
+    throw new TypeError('Unsupported response body', { cause: JSON.stringify(resp) });
+  }
+
+  public queryRaw(query: string, time = new Date()): Promise<PrometheusResponse> {
+    const searchParams = new URLSearchParams();
+    searchParams.set('query', query);
+    searchParams.set('time', String(Math.trunc(time.getTime() / 1000)));
+    searchParams.set('limit', '10');
+
+    return this.callPrometheus('query', searchParams);
+  }
+
+  public queryRangeRaw(
+    query: string,
+    step: PrometheusDuration,
+    start: Date,
+    end = new Date(),
+  ): Promise<PrometheusResponse> {
+    const searchParams = new URLSearchParams();
+    searchParams.set('query', query);
+    searchParams.set('step', step);
+    searchParams.set('start', String(Math.trunc(start.getTime() / 1000)));
+    searchParams.set('end', String(Math.trunc(end.getTime() / 1000)));
+    searchParams.set('limit', '10');
+
+    return this.callPrometheus('query_range', searchParams);
+  }
+
+  private async callPrometheus(api: string, searchParams: URLSearchParams): Promise<PrometheusResponse> {
+    const url = new URL('api/v1/' + api, this.config.server);
+    for (const [name, value] of searchParams) {
+      url.searchParams.append(name, value);
+    }
+
+    const resp = await this.httpClient(url, { method: 'GET' });
+    if (!resp.ok || resp.status !== 200) {
+      throw new Error('Invalid HTTP response from Prometheus', {
+        cause: {
+          url,
+          status: resp.status,
+          statusText: resp.statusText,
+        },
+      });
+    }
+
+    const body = await resp.json();
+    if ('status' in body && (body.status === 'error' || body.status === 'success')) {
+      return body;
+    }
+
+    throw new Error('Invalid response from Prometheus', {
+      cause: {
+        url,
+        body,
+      },
+    });
+  }
+}
+
+export type PrometheusDuration = `${number}s` | `${number}m` | `${number}h`;
+
+export type PrometheusData =
+  | {
+      resultType: 'vector';
+      result: Array<{
+        metric: unknown;
+        value: [unixTimestamp: number, value: string];
+      }>;
+    }
+  | {
+      resultType: 'matrix';
+      result: Array<{
+        metric: unknown;
+        values: [unixTimestamp: number, value: string];
+      }>;
+    }
+  | {
+      resultType: 'scalar' | 'string';
+      result: unknown;
+    };
+
+export type PrometheusResponse =
+  | {
+      status: 'error';
+      errorType: string;
+      error: string;
+    }
+  | {
+      status: 'success';
+      data: PrometheusData;
+    };
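A hedged usage sketch of the new PrometheusClient; the server URL and the PromQL expressions are placeholders rather than queries used by this package, and the snippet assumes an async context:

const prometheus = new PrometheusClient({ server: new URL('http://localhost:9090/') });

// Instant query: returns the first vector sample as a number, or 0 when the result is empty.
const peerCount = await prometheus.querySingleValue('sum(libp2p_peers)');

// Range query: raw API response sampled every 30s over the last hour.
const range = await prometheus.queryRangeRaw('rate(some_counter_total[5m])', '30s', new Date(Date.now() - 3600_000));
if (range.status === 'success' && range.data.resultType === 'matrix') {
  console.log(`got ${range.data.result.length} series, first sample of peer count was ${peerCount}`);
}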
package/src/spartan/setup_test_wallets.ts
CHANGED

@@ -315,11 +315,17 @@ export async function performTransfers({
   }
 }
 
+export type WalletWrapper = {
+  wallet: TestWallet;
+  aztecNode: AztecNode;
+  cleanup: () => Promise<void>;
+};
+
 export async function createWalletAndAztecNodeClient(
   nodeUrl: string,
   proverEnabled: boolean,
   logger: Logger,
-): Promise<
+): Promise<WalletWrapper> {
   const aztecNode = createAztecNodeClient(nodeUrl);
   const [bbConfig, acvmConfig] = await Promise.all([getBBConfig(logger), getACVMConfig(logger)]);
   const pxeConfig = {
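A hedged sketch of a caller using the newly exported WalletWrapper shape; the node URL is a placeholder and `logger` is assumed to exist in the surrounding test:

const { wallet, aztecNode, cleanup } = await createWalletAndAztecNodeClient('http://localhost:8080', false, logger);
try {
  logger.info(`Connected to node at block ${await aztecNode.getBlockNumber()}`);
  // ... run the scenario against `wallet` ...
} finally {
  // Releases whatever the helper set up (temporary BB/ACVM state, the PXE-backed wallet, etc.).
  await cleanup();
}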
package/src/spartan/tx_metrics.ts
CHANGED

@@ -1,5 +1,6 @@
 import type { AztecNode } from '@aztec/aztec.js/node';
 import type { L2Block } from '@aztec/stdlib/block';
+import type { TopicType } from '@aztec/stdlib/p2p';
 import { Tx, type TxReceipt, TxStatus } from '@aztec/stdlib/tx';
 
 import { createHistogram } from 'perf_hooks';
@@ -21,6 +22,8 @@ export class TxInclusionMetrics {
   private groups = new Set<string>();
   private blocks = new Map<number, Promise<L2Block>>();
 
+  private p2pGossipLatencyByTopic: Partial<Record<TopicType, { p50: number; p95: number }>> = {};
+
   constructor(private aztecNode: AztecNode) {}
 
   recordSentTx(tx: Tx, group: string): void {
@@ -71,7 +74,7 @@ export class TxInclusionMetrics {
   } {
     const histogram = createHistogram({});
     for (const tx of this.data.values()) {
-      if (!tx.blocknumber || tx.group !== group) {
+      if (!tx.blocknumber || tx.group !== group || tx.minedAt === -1) {
         continue;
       }
 
@@ -101,6 +104,10 @@ export class TxInclusionMetrics {
     };
   }
 
+  public recordP2PGossipLatency(topicName: TopicType, p50: number, p95: number): void {
+    this.p2pGossipLatencyByTopic[topicName] = { p50, p95 };
+  }
+
   toGithubActionBenchmarkJSON(): Array<{ name: string; unit: string; value: number; range?: number; extra?: string }> {
     const data: Array<{ name: string; unit: string; value: number; range?: number; extra?: string }> = [];
     for (const group of this.groups) {
@@ -125,6 +132,19 @@ export class TxInclusionMetrics {
       );
     }
 
+    for (const [topic, { p50, p95 }] of Object.entries(this.p2pGossipLatencyByTopic)) {
+      data.push({
+        name: `p2p_gossip_latency/${topic}/p50`,
+        unit: 'ms',
+        value: p50,
+      });
+      data.push({
+        name: `p2p_gossip_latency/${topic}/p95`,
+        unit: 'ms',
+        value: p95,
+      });
+    }
+
     return data;
   }
 }
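A hedged sketch of wiring the new gossip-latency API into a benchmark dump; `metrics` is a TxInclusionMetrics instance owned by the test harness, the import path is hypothetical, and `gossipLatencies` (topic → { p50, p95 } in milliseconds, e.g. derived from Prometheus histogram quantiles) is assumed to be collected elsewhere:

import { writeFile } from 'fs/promises';
import type { TopicType } from '@aztec/stdlib/p2p';
// Hypothetical relative import; adjust to where tx_metrics.ts lives in your tree.
import type { TxInclusionMetrics } from './spartan/tx_metrics.js';

declare const metrics: TxInclusionMetrics;                                   // created by the test harness
declare const gossipLatencies: Map<TopicType, { p50: number; p95: number }>; // e.g. from Prometheus quantiles

for (const [topic, { p50, p95 }] of gossipLatencies) {
  metrics.recordP2PGossipLatency(topic, p50, p95);
}

// Each recorded topic adds p2p_gossip_latency/<topic>/p50 and .../p95 entries (unit: ms)
// alongside the existing per-group tx inclusion metrics.
await writeFile('bench-output.json', JSON.stringify(metrics.toGithubActionBenchmarkJSON(), null, 2));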