@aztec/aztec 0.0.1-commit.7cf39cb55 → 0.0.1-commit.808bf7f90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/bin/index.js +5 -1
- package/dest/cli/admin_api_key_store.d.ts +45 -0
- package/dest/cli/admin_api_key_store.d.ts.map +1 -0
- package/dest/cli/admin_api_key_store.js +98 -0
- package/dest/cli/aztec_start_action.d.ts +1 -1
- package/dest/cli/aztec_start_action.d.ts.map +1 -1
- package/dest/cli/aztec_start_action.js +46 -10
- package/dest/cli/aztec_start_options.d.ts +1 -1
- package/dest/cli/aztec_start_options.d.ts.map +1 -1
- package/dest/cli/aztec_start_options.js +23 -6
- package/dest/cli/cli.d.ts +1 -1
- package/dest/cli/cli.d.ts.map +1 -1
- package/dest/cli/cli.js +0 -1
- package/dest/cli/cmds/compile.d.ts +4 -0
- package/dest/cli/cmds/compile.d.ts.map +1 -0
- package/dest/cli/cmds/compile.js +68 -0
- package/dest/cli/cmds/profile.d.ts +4 -0
- package/dest/cli/cmds/profile.d.ts.map +1 -0
- package/dest/cli/cmds/profile.js +8 -0
- package/dest/cli/cmds/profile_flamegraph.d.ts +4 -0
- package/dest/cli/cmds/profile_flamegraph.d.ts.map +1 -0
- package/dest/cli/cmds/profile_flamegraph.js +51 -0
- package/dest/cli/cmds/profile_gates.d.ts +4 -0
- package/dest/cli/cmds/profile_gates.d.ts.map +1 -0
- package/dest/cli/cmds/profile_gates.js +57 -0
- package/dest/cli/cmds/profile_utils.d.ts +18 -0
- package/dest/cli/cmds/profile_utils.d.ts.map +1 -0
- package/dest/cli/cmds/profile_utils.js +50 -0
- package/dest/cli/cmds/start_node.d.ts +1 -1
- package/dest/cli/cmds/start_node.d.ts.map +1 -1
- package/dest/cli/cmds/start_node.js +58 -8
- package/dest/cli/cmds/utils/artifacts.d.ts +21 -0
- package/dest/cli/cmds/utils/artifacts.d.ts.map +1 -0
- package/dest/cli/cmds/utils/artifacts.js +24 -0
- package/dest/cli/cmds/utils/spawn.d.ts +3 -0
- package/dest/cli/cmds/utils/spawn.d.ts.map +1 -0
- package/dest/cli/cmds/utils/spawn.js +16 -0
- package/dest/local-network/local-network.d.ts +3 -1
- package/dest/local-network/local-network.d.ts.map +1 -1
- package/dest/local-network/local-network.js +25 -4
- package/dest/testing/anvil_test_watcher.d.ts +9 -1
- package/dest/testing/anvil_test_watcher.d.ts.map +1 -1
- package/dest/testing/anvil_test_watcher.js +52 -15
- package/package.json +34 -34
- package/scripts/aztec.sh +7 -4
- package/scripts/init.sh +23 -13
- package/scripts/new.sh +17 -16
- package/scripts/setup_workspace.sh +124 -0
- package/src/bin/index.ts +5 -1
- package/src/cli/admin_api_key_store.ts +128 -0
- package/src/cli/aztec_start_action.ts +50 -6
- package/src/cli/aztec_start_options.ts +25 -5
- package/src/cli/cli.ts +0 -1
- package/src/cli/cmds/compile.ts +80 -0
- package/src/cli/cmds/profile.ts +25 -0
- package/src/cli/cmds/profile_flamegraph.ts +63 -0
- package/src/cli/cmds/profile_gates.ts +67 -0
- package/src/cli/cmds/profile_utils.ts +58 -0
- package/src/cli/cmds/start_node.ts +49 -7
- package/src/cli/cmds/utils/artifacts.ts +44 -0
- package/src/cli/cmds/utils/spawn.ts +16 -0
- package/src/local-network/local-network.ts +38 -7
- package/src/testing/anvil_test_watcher.ts +59 -15
- package/dest/cli/cmds/start_prover_node.d.ts +0 -7
- package/dest/cli/cmds/start_prover_node.d.ts.map +0 -1
- package/dest/cli/cmds/start_prover_node.js +0 -108
- package/scripts/compile.sh +0 -44
- package/scripts/extract_function.js +0 -47
- package/scripts/flamegraph.sh +0 -59
- package/scripts/setup_project.sh +0 -31
- package/src/cli/cmds/start_prover_node.ts +0 -124
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import type { LogFn } from '@aztec/foundation/log';
|
|
2
|
+
|
|
3
|
+
import { readFile, rename, rm, writeFile } from 'fs/promises';
|
|
4
|
+
import { basename, dirname, join } from 'path';
|
|
5
|
+
|
|
6
|
+
import { makeFunctionArtifact } from './profile_utils.js';
|
|
7
|
+
import type { CompiledArtifact } from './utils/artifacts.js';
|
|
8
|
+
import { run } from './utils/spawn.js';
|
|
9
|
+
|
|
10
|
+
/** Generates a gate count flamegraph SVG for a single contract function. */
|
|
11
|
+
export async function profileFlamegraph(artifactPath: string, functionName: string, log: LogFn): Promise<void> {
|
|
12
|
+
const raw = await readFile(artifactPath, 'utf-8');
|
|
13
|
+
const artifact: CompiledArtifact = JSON.parse(raw);
|
|
14
|
+
|
|
15
|
+
if (!Array.isArray(artifact.functions)) {
|
|
16
|
+
throw new Error(`${artifactPath} does not appear to be a contract artifact (no functions array)`);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
const func = artifact.functions.find(f => f.name === functionName);
|
|
20
|
+
if (!func) {
|
|
21
|
+
const available = artifact.functions.map(f => f.name).join(', ');
|
|
22
|
+
throw new Error(`Function "${functionName}" not found in artifact. Available: ${available}`);
|
|
23
|
+
}
|
|
24
|
+
if (func.is_unconstrained) {
|
|
25
|
+
throw new Error(`Function "${functionName}" is unconstrained and cannot be profiled`);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
const outputDir = dirname(artifactPath);
|
|
29
|
+
const contractName = basename(artifactPath, '.json');
|
|
30
|
+
const functionArtifact = join(outputDir, `${contractName}-${functionName}.json`);
|
|
31
|
+
|
|
32
|
+
try {
|
|
33
|
+
await writeFile(functionArtifact, makeFunctionArtifact(artifact, func));
|
|
34
|
+
|
|
35
|
+
const profiler = process.env.PROFILER_PATH ?? 'noir-profiler';
|
|
36
|
+
const bb = process.env.BB ?? 'bb';
|
|
37
|
+
|
|
38
|
+
await run(profiler, [
|
|
39
|
+
'gates',
|
|
40
|
+
'--artifact-path',
|
|
41
|
+
functionArtifact,
|
|
42
|
+
'--backend-path',
|
|
43
|
+
bb,
|
|
44
|
+
'--backend-gates-command',
|
|
45
|
+
'gates',
|
|
46
|
+
'--output',
|
|
47
|
+
outputDir,
|
|
48
|
+
'--scheme',
|
|
49
|
+
'chonk',
|
|
50
|
+
'--include_gates_per_opcode',
|
|
51
|
+
]);
|
|
52
|
+
|
|
53
|
+
// noir-profiler names the SVG using the internal function name which
|
|
54
|
+
// retains the __aztec_nr_internals__ prefix in the bytecode metadata.
|
|
55
|
+
const srcSvg = join(outputDir, `__aztec_nr_internals__${functionName}_gates.svg`);
|
|
56
|
+
const destSvg = join(outputDir, `${contractName}-${functionName}-flamegraph.svg`);
|
|
57
|
+
await rename(srcSvg, destSvg);
|
|
58
|
+
|
|
59
|
+
log(`Flamegraph written to ${destSvg}`);
|
|
60
|
+
} finally {
|
|
61
|
+
await rm(functionArtifact, { force: true });
|
|
62
|
+
}
|
|
63
|
+
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { asyncPool } from '@aztec/foundation/async-pool';
|
|
2
|
+
import type { LogFn } from '@aztec/foundation/log';
|
|
3
|
+
|
|
4
|
+
import { execFile as execFileCb } from 'child_process';
|
|
5
|
+
import { rm } from 'fs/promises';
|
|
6
|
+
import { promisify } from 'util';
|
|
7
|
+
|
|
8
|
+
import { MAX_CONCURRENT, discoverArtifacts } from './profile_utils.js';
|
|
9
|
+
|
|
10
|
+
const execFile = promisify(execFileCb);
|
|
11
|
+
|
|
12
|
+
interface GateCountResult {
|
|
13
|
+
name: string;
|
|
14
|
+
gateCount: number;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/** Parses circuit_size from bb gates JSON output: { "functions": [{ "circuit_size": N }] } */
|
|
18
|
+
function parseGateCount(stdout: string): number {
|
|
19
|
+
const parsed = JSON.parse(stdout);
|
|
20
|
+
const size = parsed?.functions?.[0]?.circuit_size;
|
|
21
|
+
if (typeof size !== 'number') {
|
|
22
|
+
throw new Error('Failed to parse circuit_size from bb gates output');
|
|
23
|
+
}
|
|
24
|
+
return size;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
/** Runs bb gates on a single artifact file and returns the gate count. */
|
|
28
|
+
async function getGateCount(bb: string, artifactPath: string): Promise<number> {
|
|
29
|
+
const { stdout } = await execFile(bb, ['gates', '--scheme', 'chonk', '-b', artifactPath]);
|
|
30
|
+
return parseGateCount(stdout);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/** Profiles all compiled artifacts in a target directory and prints gate counts. */
|
|
34
|
+
export async function profileGates(targetDir: string, log: LogFn): Promise<void> {
|
|
35
|
+
const bb = process.env.BB ?? 'bb';
|
|
36
|
+
const { artifacts, tmpDir } = await discoverArtifacts(targetDir);
|
|
37
|
+
|
|
38
|
+
if (artifacts.length === 0) {
|
|
39
|
+
log('No artifacts found in target directory.');
|
|
40
|
+
return;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
const results: GateCountResult[] = await asyncPool(MAX_CONCURRENT, artifacts, async artifact => ({
|
|
45
|
+
name: artifact.name,
|
|
46
|
+
gateCount: await getGateCount(bb, artifact.filePath),
|
|
47
|
+
}));
|
|
48
|
+
results.sort((a, b) => a.name.localeCompare(b.name));
|
|
49
|
+
|
|
50
|
+
if (results.length === 0) {
|
|
51
|
+
log('No constrained circuits found.');
|
|
52
|
+
return;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
const maxNameLen = Math.max(...results.map(r => r.name.length));
|
|
56
|
+
log('');
|
|
57
|
+
log('Gate counts:');
|
|
58
|
+
log('-'.repeat(maxNameLen + 16));
|
|
59
|
+
for (const { name, gateCount } of results) {
|
|
60
|
+
log(`${name.padEnd(maxNameLen)} ${gateCount.toLocaleString().padStart(12)}`);
|
|
61
|
+
}
|
|
62
|
+
log('-'.repeat(maxNameLen + 16));
|
|
63
|
+
log(`Total: ${results.length} circuit(s)`);
|
|
64
|
+
} finally {
|
|
65
|
+
await rm(tmpDir, { recursive: true, force: true });
|
|
66
|
+
}
|
|
67
|
+
}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { mkdtemp, writeFile } from 'fs/promises';
|
|
2
|
+
import { tmpdir } from 'os';
|
|
3
|
+
import { join } from 'path';
|
|
4
|
+
|
|
5
|
+
import type { CompiledArtifact, ContractFunction } from './utils/artifacts.js';
|
|
6
|
+
import { readArtifactFiles } from './utils/artifacts.js';
|
|
7
|
+
|
|
8
|
+
export const MAX_CONCURRENT = 4;
|
|
9
|
+
|
|
10
|
+
export interface DiscoveredArtifact {
|
|
11
|
+
name: string;
|
|
12
|
+
filePath: string;
|
|
13
|
+
type: 'contract-function' | 'program';
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Reads a target directory and returns a list of discovered artifacts with temp files
|
|
18
|
+
* created for contract functions. Caller must clean up tmpDir when done.
|
|
19
|
+
*/
|
|
20
|
+
export async function discoverArtifacts(
|
|
21
|
+
targetDir: string,
|
|
22
|
+
): Promise<{ artifacts: DiscoveredArtifact[]; tmpDir: string }> {
|
|
23
|
+
const files = await readArtifactFiles(targetDir);
|
|
24
|
+
const tmpDir = await mkdtemp(join(tmpdir(), 'aztec-profile-'));
|
|
25
|
+
const artifacts: DiscoveredArtifact[] = [];
|
|
26
|
+
|
|
27
|
+
for (const file of files) {
|
|
28
|
+
if (Array.isArray(file.content.functions)) {
|
|
29
|
+
for (const func of file.content.functions) {
|
|
30
|
+
if (!func.bytecode || func.is_unconstrained) {
|
|
31
|
+
continue;
|
|
32
|
+
}
|
|
33
|
+
const name = `${file.name}::${func.name}`;
|
|
34
|
+
const tmpPath = join(tmpDir, `${file.name}-${func.name}.json`);
|
|
35
|
+
await writeFile(tmpPath, makeFunctionArtifact(file.content, func));
|
|
36
|
+
artifacts.push({ name, filePath: tmpPath, type: 'contract-function' });
|
|
37
|
+
}
|
|
38
|
+
} else if (file.content.bytecode) {
|
|
39
|
+
artifacts.push({ name: file.name, filePath: file.filePath, type: 'program' });
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
return { artifacts, tmpDir };
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
/** Extracts a contract function as a standalone program artifact JSON string. */
|
|
47
|
+
export function makeFunctionArtifact(artifact: CompiledArtifact, func: ContractFunction) {
|
|
48
|
+
/* eslint-disable camelcase */
|
|
49
|
+
return JSON.stringify({
|
|
50
|
+
noir_version: artifact.noir_version,
|
|
51
|
+
hash: 0,
|
|
52
|
+
abi: func.abi,
|
|
53
|
+
bytecode: func.bytecode,
|
|
54
|
+
debug_symbols: func.debug_symbols,
|
|
55
|
+
file_map: artifact.file_map,
|
|
56
|
+
});
|
|
57
|
+
/* eslint-enable camelcase */
|
|
58
|
+
}
|
|
@@ -6,13 +6,16 @@ import { getL1Config } from '@aztec/cli/config';
|
|
|
6
6
|
import { getPublicClient } from '@aztec/ethereum/client';
|
|
7
7
|
import { SecretValue } from '@aztec/foundation/config';
|
|
8
8
|
import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server';
|
|
9
|
+
import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici';
|
|
9
10
|
import type { LogFn } from '@aztec/foundation/log';
|
|
11
|
+
import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker';
|
|
10
12
|
import { type CliPXEOptions, type PXEConfig, allPxeConfigMappings } from '@aztec/pxe/config';
|
|
11
13
|
import { AztecNodeAdminApiSchema, AztecNodeApiSchema } from '@aztec/stdlib/interfaces/client';
|
|
12
|
-
import { P2PApiSchema } from '@aztec/stdlib/interfaces/server';
|
|
14
|
+
import { P2PApiSchema, ProverNodeApiSchema, type ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
|
|
13
15
|
import {
|
|
14
16
|
type TelemetryClientConfig,
|
|
15
17
|
initTelemetryClient,
|
|
18
|
+
makeTracedFetch,
|
|
16
19
|
telemetryClientConfigMappings,
|
|
17
20
|
} from '@aztec/telemetry-client';
|
|
18
21
|
import { EmbeddedWallet } from '@aztec/wallets/embedded';
|
|
@@ -25,6 +28,8 @@ import {
|
|
|
25
28
|
preloadCrsDataForVerifying,
|
|
26
29
|
setupUpdateMonitor,
|
|
27
30
|
} from '../util.js';
|
|
31
|
+
import { getVersions } from '../versioning.js';
|
|
32
|
+
import { startProverBroker } from './start_prover_broker.js';
|
|
28
33
|
|
|
29
34
|
export async function startNode(
|
|
30
35
|
options: any,
|
|
@@ -45,9 +50,32 @@ export async function startNode(
|
|
|
45
50
|
...relevantOptions,
|
|
46
51
|
};
|
|
47
52
|
|
|
53
|
+
// Prover node configuration and broker setup
|
|
54
|
+
// REFACTOR: Move the broker setup out of here and into the prover-node factory
|
|
55
|
+
let broker: ProvingJobBroker | undefined = undefined;
|
|
48
56
|
if (options.proverNode) {
|
|
49
|
-
|
|
50
|
-
|
|
57
|
+
nodeConfig.enableProverNode = true;
|
|
58
|
+
if (nodeConfig.proverAgentCount === 0) {
|
|
59
|
+
userLog(
|
|
60
|
+
`Running prover node without local prover agent. Connect prover agents or pass --proverAgent.proverAgentCount`,
|
|
61
|
+
);
|
|
62
|
+
}
|
|
63
|
+
if (nodeConfig.proverBrokerUrl) {
|
|
64
|
+
// at 1TPS we'd enqueue ~1k chonk verifier proofs and ~1k AVM proofs immediately
|
|
65
|
+
// set a lower connection limit such that we don't overload the server
|
|
66
|
+
// Keep retrying up to 30s
|
|
67
|
+
const fetch = makeTracedFetch(
|
|
68
|
+
[1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3],
|
|
69
|
+
false,
|
|
70
|
+
makeUndiciFetch(new Agent({ connections: 100 })),
|
|
71
|
+
);
|
|
72
|
+
broker = createProvingJobBrokerClient(nodeConfig.proverBrokerUrl, getVersions(nodeConfig), fetch);
|
|
73
|
+
} else if (options.proverBroker) {
|
|
74
|
+
({ broker } = await startProverBroker(options, signalHandlers, services, userLog));
|
|
75
|
+
} else {
|
|
76
|
+
userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`);
|
|
77
|
+
process.exit(1);
|
|
78
|
+
}
|
|
51
79
|
}
|
|
52
80
|
|
|
53
81
|
await preloadCrsDataForVerifying(nodeConfig, userLog);
|
|
@@ -101,12 +129,17 @@ export async function startNode(
|
|
|
101
129
|
...extractNamespacedOptions(options, 'sequencer'),
|
|
102
130
|
};
|
|
103
131
|
// If no publisher private keys have been given, use the first validator key
|
|
104
|
-
if (
|
|
132
|
+
if (
|
|
133
|
+
sequencerConfig.sequencerPublisherPrivateKeys === undefined ||
|
|
134
|
+
!sequencerConfig.sequencerPublisherPrivateKeys.length
|
|
135
|
+
) {
|
|
105
136
|
if (sequencerConfig.validatorPrivateKeys?.getValue().length) {
|
|
106
|
-
sequencerConfig.
|
|
137
|
+
sequencerConfig.sequencerPublisherPrivateKeys = [
|
|
138
|
+
new SecretValue(sequencerConfig.validatorPrivateKeys.getValue()[0]),
|
|
139
|
+
];
|
|
107
140
|
}
|
|
108
141
|
}
|
|
109
|
-
nodeConfig.
|
|
142
|
+
nodeConfig.sequencerPublisherPrivateKeys = sequencerConfig.sequencerPublisherPrivateKeys;
|
|
110
143
|
}
|
|
111
144
|
|
|
112
145
|
if (nodeConfig.p2pEnabled) {
|
|
@@ -120,13 +153,22 @@ export async function startNode(
|
|
|
120
153
|
const telemetry = await initTelemetryClient(telemetryConfig);
|
|
121
154
|
|
|
122
155
|
// Create and start Aztec Node
|
|
123
|
-
const node = await createAztecNode(nodeConfig, { telemetry }, { prefilledPublicData });
|
|
156
|
+
const node = await createAztecNode(nodeConfig, { telemetry, proverBroker: broker }, { prefilledPublicData });
|
|
124
157
|
|
|
125
158
|
// Add node and p2p to services list
|
|
126
159
|
services.node = [node, AztecNodeApiSchema];
|
|
127
160
|
services.p2p = [node.getP2P(), P2PApiSchema];
|
|
128
161
|
adminServices.nodeAdmin = [node, AztecNodeAdminApiSchema];
|
|
129
162
|
|
|
163
|
+
// Register prover-node services if the prover node subsystem is running
|
|
164
|
+
const proverNode = node.getProverNode();
|
|
165
|
+
if (proverNode) {
|
|
166
|
+
services.prover = [proverNode, ProverNodeApiSchema];
|
|
167
|
+
if (!nodeConfig.proverBrokerUrl) {
|
|
168
|
+
services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema];
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
|
|
130
172
|
// Add node stop function to signal handlers
|
|
131
173
|
signalHandlers.push(node.stop.bind(node));
|
|
132
174
|
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { readFile, readdir } from 'fs/promises';
|
|
2
|
+
import { join } from 'path';
|
|
3
|
+
|
|
4
|
+
export interface CompiledArtifact {
|
|
5
|
+
noir_version: string;
|
|
6
|
+
file_map: unknown;
|
|
7
|
+
functions: ContractFunction[];
|
|
8
|
+
bytecode?: string;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export interface ContractFunction {
|
|
12
|
+
name: string;
|
|
13
|
+
abi: unknown;
|
|
14
|
+
bytecode: string;
|
|
15
|
+
debug_symbols: unknown;
|
|
16
|
+
is_unconstrained?: boolean;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export interface ArtifactFile {
|
|
20
|
+
name: string;
|
|
21
|
+
filePath: string;
|
|
22
|
+
content: CompiledArtifact;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/** Reads all JSON artifact files from a target directory and returns their parsed contents. */
|
|
26
|
+
export async function readArtifactFiles(targetDir: string): Promise<ArtifactFile[]> {
|
|
27
|
+
let entries: string[];
|
|
28
|
+
try {
|
|
29
|
+
entries = (await readdir(targetDir)).filter(f => f.endsWith('.json'));
|
|
30
|
+
} catch (err: any) {
|
|
31
|
+
if (err?.code === 'ENOENT') {
|
|
32
|
+
throw new Error(`Target directory '${targetDir}' does not exist. Compile first with 'aztec compile'.`);
|
|
33
|
+
}
|
|
34
|
+
throw err;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
const artifacts: ArtifactFile[] = [];
|
|
38
|
+
for (const file of entries) {
|
|
39
|
+
const filePath = join(targetDir, file);
|
|
40
|
+
const content = JSON.parse(await readFile(filePath, 'utf-8')) as CompiledArtifact;
|
|
41
|
+
artifacts.push({ name: file.replace('.json', ''), filePath, content });
|
|
42
|
+
}
|
|
43
|
+
return artifacts;
|
|
44
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { spawn } from 'child_process';
|
|
2
|
+
|
|
3
|
+
/** Spawns a command with inherited stdio and rejects on non-zero exit. */
|
|
4
|
+
export function run(cmd: string, args: string[]): Promise<void> {
|
|
5
|
+
return new Promise((resolve, reject) => {
|
|
6
|
+
const child = spawn(cmd, args, { stdio: 'inherit' });
|
|
7
|
+
child.on('error', reject);
|
|
8
|
+
child.on('close', code => {
|
|
9
|
+
if (code !== 0) {
|
|
10
|
+
reject(new Error(`${cmd} exited with code ${code}`));
|
|
11
|
+
} else {
|
|
12
|
+
resolve();
|
|
13
|
+
}
|
|
14
|
+
});
|
|
15
|
+
});
|
|
16
|
+
}
|
|
@@ -18,6 +18,8 @@ import type { LogFn } from '@aztec/foundation/log';
|
|
|
18
18
|
import { DateProvider, TestDateProvider } from '@aztec/foundation/timer';
|
|
19
19
|
import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
|
|
20
20
|
import { protocolContractsHash } from '@aztec/protocol-contracts';
|
|
21
|
+
import { SequencerState } from '@aztec/sequencer-client';
|
|
22
|
+
import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
|
|
21
23
|
import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
|
|
22
24
|
import {
|
|
23
25
|
type TelemetryClient,
|
|
@@ -105,12 +107,14 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
|
|
|
105
107
|
};
|
|
106
108
|
const hdAccount = mnemonicToAccount(config.l1Mnemonic || DefaultMnemonic);
|
|
107
109
|
if (
|
|
108
|
-
aztecNodeConfig.
|
|
109
|
-
!aztecNodeConfig.
|
|
110
|
-
aztecNodeConfig.
|
|
110
|
+
aztecNodeConfig.sequencerPublisherPrivateKeys == undefined ||
|
|
111
|
+
!aztecNodeConfig.sequencerPublisherPrivateKeys.length ||
|
|
112
|
+
aztecNodeConfig.sequencerPublisherPrivateKeys[0].getValue() === NULL_KEY
|
|
111
113
|
) {
|
|
112
114
|
const privKey = hdAccount.getHdKey().privateKey;
|
|
113
|
-
aztecNodeConfig.
|
|
115
|
+
aztecNodeConfig.sequencerPublisherPrivateKeys = [
|
|
116
|
+
new SecretValue(`0x${Buffer.from(privKey!).toString('hex')}` as const),
|
|
117
|
+
];
|
|
114
118
|
}
|
|
115
119
|
if (!aztecNodeConfig.validatorPrivateKeys?.getValue().length) {
|
|
116
120
|
const privKey = hdAccount.getHdKey().privateKey;
|
|
@@ -178,6 +182,21 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
|
|
|
178
182
|
const blobClient = createBlobClient();
|
|
179
183
|
const node = await createAztecNode(aztecNodeConfig, { telemetry, blobClient, dateProvider }, { prefilledPublicData });
|
|
180
184
|
|
|
185
|
+
// Now that the node is up, let the watcher check for pending txs so it can skip unfilled slots faster when
|
|
186
|
+
// transactions are waiting in the mempool. Also let it check if the sequencer is actively building, to avoid
|
|
187
|
+
// warping time out from under an in-progress block.
|
|
188
|
+
watcher?.setGetPendingTxCount(() => node.getPendingTxCount());
|
|
189
|
+
const sequencer = node.getSequencer()?.getSequencer();
|
|
190
|
+
if (sequencer) {
|
|
191
|
+
const idleStates: Set<string> = new Set([
|
|
192
|
+
SequencerState.STOPPED,
|
|
193
|
+
SequencerState.STOPPING,
|
|
194
|
+
SequencerState.IDLE,
|
|
195
|
+
SequencerState.SYNCHRONIZING,
|
|
196
|
+
]);
|
|
197
|
+
watcher?.setIsSequencerBuilding(() => !idleStates.has(sequencer.getState()));
|
|
198
|
+
}
|
|
199
|
+
|
|
181
200
|
let epochTestSettler: EpochTestSettler | undefined;
|
|
182
201
|
if (!aztecNodeConfig.p2pEnabled) {
|
|
183
202
|
epochTestSettler = new EpochTestSettler(
|
|
@@ -191,7 +210,10 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
|
|
|
191
210
|
}
|
|
192
211
|
|
|
193
212
|
if (initialAccounts.length) {
|
|
194
|
-
const wallet = await EmbeddedWallet.create(node, {
|
|
213
|
+
const wallet = await EmbeddedWallet.create(node, {
|
|
214
|
+
pxeConfig: { proverEnabled: aztecNodeConfig.realProofs },
|
|
215
|
+
ephemeral: true,
|
|
216
|
+
});
|
|
195
217
|
|
|
196
218
|
userLog('Setting up funded test accounts...');
|
|
197
219
|
const accountManagers = await deployFundedSchnorrAccounts(wallet, initialAccounts);
|
|
@@ -221,7 +243,12 @@ export async function createLocalNetwork(config: Partial<LocalNetworkConfig> = {
|
|
|
221
243
|
*/
|
|
222
244
|
export async function createAztecNode(
|
|
223
245
|
config: Partial<AztecNodeConfig> = {},
|
|
224
|
-
deps: {
|
|
246
|
+
deps: {
|
|
247
|
+
telemetry?: TelemetryClient;
|
|
248
|
+
blobClient?: BlobClientInterface;
|
|
249
|
+
dateProvider?: DateProvider;
|
|
250
|
+
proverBroker?: ProvingJobBroker;
|
|
251
|
+
} = {},
|
|
225
252
|
options: { prefilledPublicData?: PublicDataTreeLeaf[] } = {},
|
|
226
253
|
) {
|
|
227
254
|
// TODO(#12272): will clean this up. This is criminal.
|
|
@@ -231,6 +258,10 @@ export async function createAztecNode(
|
|
|
231
258
|
...config,
|
|
232
259
|
l1Contracts: { ...l1Contracts, ...config.l1Contracts },
|
|
233
260
|
};
|
|
234
|
-
const node = await AztecNodeService.createAndSync(
|
|
261
|
+
const node = await AztecNodeService.createAndSync(
|
|
262
|
+
aztecNodeConfig,
|
|
263
|
+
{ ...deps, proverNodeDeps: { broker: deps.proverBroker } },
|
|
264
|
+
options,
|
|
265
|
+
);
|
|
235
266
|
return node;
|
|
236
267
|
}
|
|
@@ -31,6 +31,15 @@ export class AnvilTestWatcher {
|
|
|
31
31
|
|
|
32
32
|
private isMarkingAsProven = true;
|
|
33
33
|
|
|
34
|
+
// Optional callback to check if there are pending txs in the mempool.
|
|
35
|
+
private getPendingTxCount?: () => Promise<number>;
|
|
36
|
+
|
|
37
|
+
// Optional callback to check if the sequencer is actively building a block.
|
|
38
|
+
private isSequencerBuilding?: () => boolean;
|
|
39
|
+
|
|
40
|
+
// Tracks when we first observed the current unfilled slot with pending txs (real wall time).
|
|
41
|
+
private unfilledSlotFirstSeen?: { slot: number; realTime: number };
|
|
42
|
+
|
|
34
43
|
constructor(
|
|
35
44
|
private cheatcodes: EthCheatCodes,
|
|
36
45
|
rollupAddress: EthAddress,
|
|
@@ -59,6 +68,16 @@ export class AnvilTestWatcher {
|
|
|
59
68
|
this.isLocalNetwork = isLocalNetwork;
|
|
60
69
|
}
|
|
61
70
|
|
|
71
|
+
/** Sets a callback to check for pending txs, used to skip unfilled slots faster when txs are waiting. */
|
|
72
|
+
setGetPendingTxCount(fn: () => Promise<number>) {
|
|
73
|
+
this.getPendingTxCount = fn;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/** Sets a callback to check if the sequencer is actively building, to avoid warping while it works. */
|
|
77
|
+
setIsSequencerBuilding(fn: () => boolean) {
|
|
78
|
+
this.isSequencerBuilding = fn;
|
|
79
|
+
}
|
|
80
|
+
|
|
62
81
|
async start() {
|
|
63
82
|
if (this.filledRunningPromise) {
|
|
64
83
|
throw new Error('Watcher already watching for filled slot');
|
|
@@ -131,15 +150,8 @@ export class AnvilTestWatcher {
|
|
|
131
150
|
const nextSlotTimestamp = Number(await this.rollup.read.getTimestampForSlot([BigInt(nextSlot)]));
|
|
132
151
|
|
|
133
152
|
if (BigInt(currentSlot) === checkpointLog.slotNumber) {
|
|
134
|
-
//
|
|
135
|
-
|
|
136
|
-
await this.cheatcodes.warp(nextSlotTimestamp, {
|
|
137
|
-
resetBlockInterval: true,
|
|
138
|
-
});
|
|
139
|
-
} catch (e) {
|
|
140
|
-
this.logger.error(`Failed to warp to timestamp ${nextSlotTimestamp}: ${e}`);
|
|
141
|
-
}
|
|
142
|
-
|
|
153
|
+
// The current slot has been filled, we should jump to the next slot.
|
|
154
|
+
await this.warpToTimestamp(nextSlotTimestamp);
|
|
143
155
|
this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`);
|
|
144
156
|
return;
|
|
145
157
|
}
|
|
@@ -149,18 +161,50 @@ export class AnvilTestWatcher {
|
|
|
149
161
|
return;
|
|
150
162
|
}
|
|
151
163
|
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
164
|
+
// If there are pending txs and the sequencer missed them, warp quickly (after a 2s real-time debounce) so the
|
|
165
|
+
// sequencer can retry in the next slot. Without this, we'd have to wait a full real-time slot duration (~36s) for
|
|
166
|
+
// the dateProvider to catch up to the next slot timestamp. We skip the warp if the sequencer is actively building
|
|
167
|
+
// to avoid invalidating its in-progress work.
|
|
168
|
+
if (this.getPendingTxCount) {
|
|
169
|
+
const pendingTxs = await this.getPendingTxCount();
|
|
170
|
+
if (pendingTxs > 0) {
|
|
171
|
+
if (this.isSequencerBuilding?.()) {
|
|
172
|
+
this.unfilledSlotFirstSeen = undefined;
|
|
173
|
+
return;
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
const realNow = Date.now();
|
|
177
|
+
if (!this.unfilledSlotFirstSeen || this.unfilledSlotFirstSeen.slot !== currentSlot) {
|
|
178
|
+
this.unfilledSlotFirstSeen = { slot: currentSlot, realTime: realNow };
|
|
179
|
+
return;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
if (realNow - this.unfilledSlotFirstSeen.realTime > 2000) {
|
|
183
|
+
await this.warpToTimestamp(nextSlotTimestamp);
|
|
184
|
+
this.unfilledSlotFirstSeen = undefined;
|
|
185
|
+
this.logger.info(`Slot ${currentSlot} was missed with pending txs, jumped to next slot`);
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
return;
|
|
158
189
|
}
|
|
190
|
+
}
|
|
159
191
|
|
|
192
|
+
// Fallback: warp when the dateProvider time has passed the next slot timestamp.
|
|
193
|
+
const currentTimestamp = this.dateProvider?.now() ?? Date.now();
|
|
194
|
+
if (currentTimestamp > nextSlotTimestamp * 1000) {
|
|
195
|
+
await this.warpToTimestamp(nextSlotTimestamp);
|
|
160
196
|
this.logger.info(`Slot ${currentSlot} was missed, jumped to next slot`);
|
|
161
197
|
}
|
|
162
198
|
} catch {
|
|
163
199
|
this.logger.error('mineIfSlotFilled failed');
|
|
164
200
|
}
|
|
165
201
|
}
|
|
202
|
+
|
|
203
|
+
  /**
   * Warps the underlying anvil chain to the given L1 timestamp (in seconds; the
   * caller compares these values against `Date.now()` scaled by 1000).
   * Best-effort: a failed warp is logged and swallowed so the watcher loop
   * keeps running rather than crashing on a transient cheatcode error.
   */
  private async warpToTimestamp(timestamp: number) {
    try {
      await this.cheatcodes.warp(timestamp, { resetBlockInterval: true });
    } catch (e) {
      this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`);
    }
  }
|
|
166
210
|
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server';
|
|
2
|
-
import type { LogFn } from '@aztec/foundation/log';
|
|
3
|
-
import { type ProverNodeConfig } from '@aztec/prover-node';
|
|
4
|
-
export declare function startProverNode(options: any, signalHandlers: (() => Promise<void>)[], services: NamespacedApiHandlers, userLog: LogFn): Promise<{
|
|
5
|
-
config: ProverNodeConfig;
|
|
6
|
-
}>;
|
|
7
|
-
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3RhcnRfcHJvdmVyX25vZGUuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9jbGkvY21kcy9zdGFydF9wcm92ZXJfbm9kZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFLQSxPQUFPLEtBQUssRUFBRSxxQkFBcUIsRUFBRSxNQUFNLG1DQUFtQyxDQUFDO0FBRS9FLE9BQU8sS0FBSyxFQUFFLEtBQUssRUFBRSxNQUFNLHVCQUF1QixDQUFDO0FBRW5ELE9BQU8sRUFDTCxLQUFLLGdCQUFnQixFQUl0QixNQUFNLG9CQUFvQixDQUFDO0FBUzVCLHdCQUFzQixlQUFlLENBQ25DLE9BQU8sRUFBRSxHQUFHLEVBQ1osY0FBYyxFQUFFLENBQUMsTUFBTSxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUMsRUFBRSxFQUN2QyxRQUFRLEVBQUUscUJBQXFCLEVBQy9CLE9BQU8sRUFBRSxLQUFLLEdBQ2IsT0FBTyxDQUFDO0lBQUUsTUFBTSxFQUFFLGdCQUFnQixDQUFBO0NBQUUsQ0FBQyxDQStGdkMifQ==
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"start_prover_node.d.ts","sourceRoot":"","sources":["../../../src/cli/cmds/start_prover_node.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,mCAAmC,CAAC;AAE/E,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAEnD,OAAO,EACL,KAAK,gBAAgB,EAItB,MAAM,oBAAoB,CAAC;AAS5B,wBAAsB,eAAe,CACnC,OAAO,EAAE,GAAG,EACZ,cAAc,EAAE,CAAC,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,EACvC,QAAQ,EAAE,qBAAqB,EAC/B,OAAO,EAAE,KAAK,GACb,OAAO,CAAC;IAAE,MAAM,EAAE,gBAAgB,CAAA;CAAE,CAAC,CA+FvC"}
|
|
@@ -1,108 +0,0 @@
|
|
|
1
|
-
import { getInitialTestAccountsData } from '@aztec/accounts/testing';
|
|
2
|
-
import { Fr } from '@aztec/aztec.js/fields';
|
|
3
|
-
import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils';
|
|
4
|
-
import { getL1Config } from '@aztec/cli/config';
|
|
5
|
-
import { getPublicClient } from '@aztec/ethereum/client';
|
|
6
|
-
import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici';
|
|
7
|
-
import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker';
|
|
8
|
-
import { createProverNode, getProverNodeConfigFromEnv, proverNodeConfigMappings } from '@aztec/prover-node';
|
|
9
|
-
import { P2PApiSchema, ProverNodeApiSchema } from '@aztec/stdlib/interfaces/server';
|
|
10
|
-
import { initTelemetryClient, makeTracedFetch, telemetryClientConfigMappings } from '@aztec/telemetry-client';
|
|
11
|
-
import { getGenesisValues } from '@aztec/world-state/testing';
|
|
12
|
-
import { extractRelevantOptions, preloadCrsDataForVerifying, setupUpdateMonitor } from '../util.js';
|
|
13
|
-
import { getVersions } from '../versioning.js';
|
|
14
|
-
import { startProverBroker } from './start_prover_broker.js';
|
|
15
|
-
/**
 * Boots a standalone prover node from CLI options layered over environment
 * configuration.
 *
 * Resolves L1 contract addresses from the registry, verifies the locally
 * computed genesis archive root against the deployed rollup, wires up a
 * proving-job broker (remote via --prover-broker-url, or embedded via
 * --prover-broker), registers the exposed RPC services, and starts the node.
 *
 * @param options - Parsed CLI options.
 * @param signalHandlers - Collector for shutdown callbacks.
 * @param services - Registry of RPC services exposed by this process.
 * @param userLog - Logger for user-facing messages.
 * @returns An object holding the fully resolved prover-node configuration.
 */
export async function startProverNode(options, signalHandlers, services, userLog) {
    // A prover node is a standalone role: refuse to run alongside other components.
    const incompatibleFlags = [options.node, options.sequencer, options.pxe, options.p2pBootstrap, options.txe];
    if (incompatibleFlags.some(Boolean)) {
        userLog(`Starting a prover-node with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`);
        process.exit(1);
    }
    // CLI options take precedence over environment defaults.
    let nodeConfig = Object.assign({}, getProverNodeConfigFromEnv(), extractRelevantOptions(options, proverNodeConfigMappings, 'proverNode'));
    const registryAddress = nodeConfig.l1Contracts.registryAddress;
    if (!registryAddress || registryAddress.isZero()) {
        throw new Error('L1 registry address is required to start a Prover Node');
    }
    // No explicit rollup version means we track whichever rollup the registry marks canonical.
    const tracksCanonicalRollup = typeof nodeConfig.rollupVersion !== 'number';
    const { addresses, config } = await getL1Config(registryAddress, nodeConfig.l1RpcUrls, nodeConfig.l1ChainId, nodeConfig.rollupVersion);
    process.env.ROLLUP_CONTRACT_ADDRESS ??= addresses.rollupAddress.toString();
    nodeConfig.l1Contracts = addresses;
    nodeConfig = Object.assign({}, nodeConfig, config);
    // Collect the accounts pre-funded at genesis so the archive root can be recomputed locally.
    let fundedAccounts = [];
    if (nodeConfig.testAccounts) {
        fundedAccounts = (await getInitialTestAccountsData()).map((acc) => acc.address);
    }
    if (nodeConfig.sponsoredFPC) {
        fundedAccounts = fundedAccounts.concat([await getSponsoredFPCAddress()]);
    }
    userLog(`Initial funded accounts: ${fundedAccounts.map((acc) => acc.toString()).join(', ')}`);
    const { genesisArchiveRoot, prefilledPublicData } = await getGenesisValues(fundedAccounts);
    userLog(`Genesis archive root: ${genesisArchiveRoot.toString()}`);
    // Guard against connecting to a rollup that was deployed with different genesis state.
    if (!Fr.fromHexString(config.genesisArchiveTreeRoot).equals(genesisArchiveRoot)) {
        throw new Error(`The computed genesis archive tree root ${genesisArchiveRoot} does not match the expected genesis archive tree root ${config.genesisArchiveTreeRoot} for the rollup deployed at ${addresses.rollupAddress}`);
    }
    const telemetry = await initTelemetryClient(extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'));
    let broker;
    if (nodeConfig.proverBrokerUrl) {
        // at 1TPS we'd enqueue ~1k chonk verifier proofs and ~1k AVM proofs immediately
        // set a lower connection limit such that we don't overload the server
        // Keep retrying up to 30s
        const retryIntervals = [1, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3];
        const tracedFetch = makeTracedFetch(retryIntervals, false, makeUndiciFetch(new Agent({ connections: 100 })));
        broker = createProvingJobBrokerClient(nodeConfig.proverBrokerUrl, getVersions(nodeConfig), tracedFetch);
    } else if (options.proverBroker) {
        ({ broker } = await startProverBroker(options, signalHandlers, services, userLog));
    } else {
        userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`);
        process.exit(1);
    }
    if (nodeConfig.proverAgentCount === 0) {
        userLog(`Running prover node without local prover agent. Connect one or more prover agents to this node or pass --proverAgent.proverAgentCount`);
    }
    await preloadCrsDataForVerifying(nodeConfig, userLog);
    const proverNode = await createProverNode(nodeConfig, { telemetry, broker }, { prefilledPublicData });
    services.proverNode = [proverNode, ProverNodeApiSchema];
    if (proverNode.getP2P()) {
        services.p2p = [proverNode.getP2P(), P2PApiSchema];
    }
    // With an embedded broker, expose its job source so external prover agents can pull work.
    if (!nodeConfig.proverBrokerUrl) {
        services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema];
    }
    signalHandlers.push(proverNode.stop.bind(proverNode));
    await proverNode.start();
    if (nodeConfig.autoUpdate !== 'disabled' && nodeConfig.autoUpdateUrl) {
        await setupUpdateMonitor(nodeConfig.autoUpdate, new URL(nodeConfig.autoUpdateUrl), tracksCanonicalRollup, getPublicClient(nodeConfig), nodeConfig.l1Contracts.registryAddress, signalHandlers);
    }
    return {
        config: nodeConfig
    };
}
|