@aztec/ethereum 3.0.0-nightly.20251211 → 3.0.0-nightly.20251213
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/config.d.ts +3 -42
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +3 -327
- package/dest/contracts/inbox.d.ts +3 -3
- package/dest/contracts/inbox.d.ts.map +1 -1
- package/dest/contracts/rollup.d.ts +3 -3
- package/dest/contracts/rollup.d.ts.map +1 -1
- package/dest/deploy_aztec_l1_contracts.d.ts +245 -0
- package/dest/deploy_aztec_l1_contracts.d.ts.map +1 -0
- package/dest/deploy_aztec_l1_contracts.js +329 -0
- package/dest/deploy_l1_contract.d.ts +68 -0
- package/dest/deploy_l1_contract.d.ts.map +1 -0
- package/dest/deploy_l1_contract.js +312 -0
- package/dest/forwarder_proxy.js +1 -1
- package/dest/l1_artifacts.d.ts +37 -37
- package/dest/test/rollup_cheat_codes.js +4 -1
- package/dest/test/start_anvil.d.ts +3 -1
- package/dest/test/start_anvil.d.ts.map +1 -1
- package/package.json +10 -6
- package/src/config.ts +2 -406
- package/src/contracts/inbox.ts +2 -2
- package/src/contracts/rollup.ts +2 -2
- package/src/deploy_aztec_l1_contracts.ts +545 -0
- package/src/deploy_l1_contract.ts +362 -0
- package/src/forwarder_proxy.ts +1 -1
- package/src/test/rollup_cheat_codes.ts +1 -1
- package/src/test/start_anvil.ts +2 -0
- package/dest/deploy_l1_contracts.d.ts +0 -673
- package/dest/deploy_l1_contracts.d.ts.map +0 -1
- package/dest/deploy_l1_contracts.js +0 -1491
- package/src/deploy_l1_contracts.ts +0 -1869
|
@@ -1,1491 +0,0 @@
|
|
|
1
|
-
import { L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/constants';
|
|
2
|
-
import { SlotNumber } from '@aztec/foundation/branded-types';
|
|
3
|
-
import { getActiveNetworkName } from '@aztec/foundation/config';
|
|
4
|
-
import { keccak256String } from '@aztec/foundation/crypto/keccak';
|
|
5
|
-
import { EthAddress } from '@aztec/foundation/eth-address';
|
|
6
|
-
import { jsonStringify } from '@aztec/foundation/json-rpc';
|
|
7
|
-
import { createLogger } from '@aztec/foundation/log';
|
|
8
|
-
import { DateProvider } from '@aztec/foundation/timer';
|
|
9
|
-
import fs from 'fs';
|
|
10
|
-
import chunk from 'lodash.chunk';
|
|
11
|
-
import { concatHex, encodeAbiParameters, encodeDeployData, encodeFunctionData, getAddress, getContract, getContractAddress, numberToHex, padHex } from 'viem';
|
|
12
|
-
import { foundry } from 'viem/chains';
|
|
13
|
-
import { isAnvilTestChain } from './chain.js';
|
|
14
|
-
import { createExtendedL1Client } from './client.js';
|
|
15
|
-
import { getEntryQueueConfig, getGovernanceConfiguration, getRewardBoostConfig, getRewardConfig, validateConfig } from './config.js';
|
|
16
|
-
import { GSEContract } from './contracts/gse.js';
|
|
17
|
-
import { deployMulticall3 } from './contracts/multicall.js';
|
|
18
|
-
import { RegistryContract } from './contracts/registry.js';
|
|
19
|
-
import { RollupContract, SlashingProposerType } from './contracts/rollup.js';
|
|
20
|
-
import { CoinIssuerArtifact, DateGatedRelayerArtifact, FeeAssetArtifact, FeeAssetHandlerArtifact, GSEArtifact, GovernanceArtifact, GovernanceProposerArtifact, MultiAdderArtifact, RegisterNewRollupVersionPayloadArtifact, RegistryArtifact, RollupArtifact, SlashFactoryArtifact, StakingAssetArtifact, StakingAssetHandlerArtifact, l1ArtifactsVerifiers, mockVerifiers } from './l1_artifacts.js';
|
|
21
|
-
import { createL1TxUtilsFromViemWallet, getL1TxUtilsConfigEnvVars } from './l1_tx_utils/index.js';
|
|
22
|
-
import { formatViemError } from './utils.js';
|
|
23
|
-
import { ZK_PASSPORT_DOMAIN, ZK_PASSPORT_SCOPE, ZK_PASSPORT_VERIFIER_ADDRESS } from './zkPassportVerifierAddress.js';
|
|
24
|
-
export const DEPLOYER_ADDRESS = '0x4e59b44847b379578588920cA78FbF26c0B4956C';
|
|
25
|
-
// Minimal ERC20 ABI for validation purposes. We only read view methods.
|
|
26
|
-
const ERC20_VALIDATION_ABI = [
|
|
27
|
-
{
|
|
28
|
-
type: 'function',
|
|
29
|
-
name: 'totalSupply',
|
|
30
|
-
stateMutability: 'view',
|
|
31
|
-
inputs: [],
|
|
32
|
-
outputs: [
|
|
33
|
-
{
|
|
34
|
-
name: '',
|
|
35
|
-
type: 'uint256'
|
|
36
|
-
}
|
|
37
|
-
]
|
|
38
|
-
},
|
|
39
|
-
{
|
|
40
|
-
type: 'function',
|
|
41
|
-
name: 'name',
|
|
42
|
-
stateMutability: 'view',
|
|
43
|
-
inputs: [],
|
|
44
|
-
outputs: [
|
|
45
|
-
{
|
|
46
|
-
name: '',
|
|
47
|
-
type: 'string'
|
|
48
|
-
}
|
|
49
|
-
]
|
|
50
|
-
},
|
|
51
|
-
{
|
|
52
|
-
type: 'function',
|
|
53
|
-
name: 'symbol',
|
|
54
|
-
stateMutability: 'view',
|
|
55
|
-
inputs: [],
|
|
56
|
-
outputs: [
|
|
57
|
-
{
|
|
58
|
-
name: '',
|
|
59
|
-
type: 'string'
|
|
60
|
-
}
|
|
61
|
-
]
|
|
62
|
-
},
|
|
63
|
-
{
|
|
64
|
-
type: 'function',
|
|
65
|
-
name: 'decimals',
|
|
66
|
-
stateMutability: 'view',
|
|
67
|
-
inputs: [],
|
|
68
|
-
outputs: [
|
|
69
|
-
{
|
|
70
|
-
name: '',
|
|
71
|
-
type: 'uint8'
|
|
72
|
-
}
|
|
73
|
-
]
|
|
74
|
-
}
|
|
75
|
-
];
|
|
76
|
-
/**
|
|
77
|
-
* Validates that the provided address points to a contract that resembles an ERC20 token.
|
|
78
|
-
* Checks for contract code and attempts common ERC20 view calls.
|
|
79
|
-
* Throws an error if validation fails.
|
|
80
|
-
*/ export async function validateExistingErc20TokenAddress(l1Client, tokenAddress, logger) {
|
|
81
|
-
const addressString = tokenAddress.toString();
|
|
82
|
-
// Ensure there is contract code at the address
|
|
83
|
-
const code = await l1Client.getCode({
|
|
84
|
-
address: addressString
|
|
85
|
-
});
|
|
86
|
-
if (!code || code === '0x') {
|
|
87
|
-
throw new Error(`No contract code found at provided token address ${addressString}`);
|
|
88
|
-
}
|
|
89
|
-
const contract = getContract({
|
|
90
|
-
address: getAddress(addressString),
|
|
91
|
-
abi: ERC20_VALIDATION_ABI,
|
|
92
|
-
client: l1Client
|
|
93
|
-
});
|
|
94
|
-
// Validate all required ERC20 methods in parallel
|
|
95
|
-
const checks = [
|
|
96
|
-
contract.read.totalSupply().then((total)=>typeof total === 'bigint'),
|
|
97
|
-
contract.read.name().then(()=>true),
|
|
98
|
-
contract.read.symbol().then(()=>true),
|
|
99
|
-
contract.read.decimals().then((dec)=>typeof dec === 'number' || typeof dec === 'bigint')
|
|
100
|
-
];
|
|
101
|
-
const results = await Promise.allSettled(checks);
|
|
102
|
-
const failedChecks = results.filter((result)=>result.status === 'rejected' || result.value !== true);
|
|
103
|
-
if (failedChecks.length > 0) {
|
|
104
|
-
throw new Error(`Address ${addressString} does not appear to implement ERC20 view methods`);
|
|
105
|
-
}
|
|
106
|
-
logger.verbose(`Validated existing token at ${addressString} appears to be ERC20-compatible`);
|
|
107
|
-
}
|
|
108
|
-
export const deploySharedContracts = async (l1Client, deployer, args, logger)=>{
|
|
109
|
-
const networkName = getActiveNetworkName();
|
|
110
|
-
logger.info(`Deploying shared contracts for network configuration: ${networkName}`);
|
|
111
|
-
const txHashes = [];
|
|
112
|
-
let feeAssetAddress;
|
|
113
|
-
let stakingAssetAddress;
|
|
114
|
-
if (args.existingTokenAddress) {
|
|
115
|
-
await validateExistingErc20TokenAddress(l1Client, args.existingTokenAddress, logger);
|
|
116
|
-
feeAssetAddress = args.existingTokenAddress;
|
|
117
|
-
stakingAssetAddress = args.existingTokenAddress;
|
|
118
|
-
logger.verbose(`Using existing token for fee and staking assets at ${args.existingTokenAddress}`);
|
|
119
|
-
} else {
|
|
120
|
-
const deployedFee = await deployer.deploy(FeeAssetArtifact, [
|
|
121
|
-
'FeeJuice',
|
|
122
|
-
'FEE',
|
|
123
|
-
l1Client.account.address
|
|
124
|
-
]);
|
|
125
|
-
feeAssetAddress = deployedFee.address;
|
|
126
|
-
logger.verbose(`Deployed Fee Asset at ${feeAssetAddress}`);
|
|
127
|
-
// Mint a tiny bit of tokens to satisfy coin-issuer constraints
|
|
128
|
-
const { txHash } = await deployer.sendTransaction({
|
|
129
|
-
to: feeAssetAddress.toString(),
|
|
130
|
-
data: encodeFunctionData({
|
|
131
|
-
abi: FeeAssetArtifact.contractAbi,
|
|
132
|
-
functionName: 'mint',
|
|
133
|
-
args: [
|
|
134
|
-
l1Client.account.address,
|
|
135
|
-
1n * 10n ** 18n
|
|
136
|
-
]
|
|
137
|
-
})
|
|
138
|
-
}, {
|
|
139
|
-
// contract may not have been deployed yet (CREATE2 returns address before mining),
|
|
140
|
-
// which causes gas estimation to fail. Hardcode to 100k which is plenty for ERC20 mint.
|
|
141
|
-
gasLimit: 100_000n
|
|
142
|
-
});
|
|
143
|
-
await l1Client.waitForTransactionReceipt({
|
|
144
|
-
hash: txHash
|
|
145
|
-
});
|
|
146
|
-
logger.verbose(`Minted tiny bit of tokens to satisfy coin-issuer constraints in ${txHash}`);
|
|
147
|
-
const deployedStaking = await deployer.deploy(StakingAssetArtifact, [
|
|
148
|
-
'Staking',
|
|
149
|
-
'STK',
|
|
150
|
-
l1Client.account.address
|
|
151
|
-
]);
|
|
152
|
-
stakingAssetAddress = deployedStaking.address;
|
|
153
|
-
logger.verbose(`Deployed Staking Asset at ${stakingAssetAddress}`);
|
|
154
|
-
await deployer.waitForDeployments();
|
|
155
|
-
}
|
|
156
|
-
const gseAddress = (await deployer.deploy(GSEArtifact, [
|
|
157
|
-
l1Client.account.address,
|
|
158
|
-
stakingAssetAddress.toString(),
|
|
159
|
-
args.activationThreshold,
|
|
160
|
-
args.ejectionThreshold
|
|
161
|
-
])).address;
|
|
162
|
-
logger.verbose(`Deployed GSE at ${gseAddress}`);
|
|
163
|
-
const { address: registryAddress } = await deployer.deploy(RegistryArtifact, [
|
|
164
|
-
l1Client.account.address,
|
|
165
|
-
feeAssetAddress.toString()
|
|
166
|
-
]);
|
|
167
|
-
logger.verbose(`Deployed Registry at ${registryAddress}`);
|
|
168
|
-
const { address: governanceProposerAddress } = await deployer.deploy(GovernanceProposerArtifact, [
|
|
169
|
-
registryAddress.toString(),
|
|
170
|
-
gseAddress.toString(),
|
|
171
|
-
BigInt(args.governanceProposerQuorum ?? args.governanceProposerRoundSize / 2 + 1),
|
|
172
|
-
BigInt(args.governanceProposerRoundSize)
|
|
173
|
-
]);
|
|
174
|
-
logger.verbose(`Deployed GovernanceProposer at ${governanceProposerAddress}`);
|
|
175
|
-
// @note @LHerskind the assets are expected to be the same at some point, but for better
|
|
176
|
-
// configurability they are different for now.
|
|
177
|
-
const { address: governanceAddress } = await deployer.deploy(GovernanceArtifact, [
|
|
178
|
-
stakingAssetAddress.toString(),
|
|
179
|
-
governanceProposerAddress.toString(),
|
|
180
|
-
gseAddress.toString(),
|
|
181
|
-
getGovernanceConfiguration(networkName)
|
|
182
|
-
]);
|
|
183
|
-
logger.verbose(`Deployed Governance at ${governanceAddress}`);
|
|
184
|
-
let needToSetGovernance = false;
|
|
185
|
-
const existingCode = await l1Client.getCode({
|
|
186
|
-
address: gseAddress.toString()
|
|
187
|
-
});
|
|
188
|
-
if (!existingCode || existingCode === '0x') {
|
|
189
|
-
needToSetGovernance = true;
|
|
190
|
-
} else {
|
|
191
|
-
const gseContract = getContract({
|
|
192
|
-
address: getAddress(gseAddress.toString()),
|
|
193
|
-
abi: GSEArtifact.contractAbi,
|
|
194
|
-
client: l1Client
|
|
195
|
-
});
|
|
196
|
-
const existingGovernance = await gseContract.read.getGovernance();
|
|
197
|
-
if (EthAddress.fromString(existingGovernance).equals(EthAddress.ZERO)) {
|
|
198
|
-
needToSetGovernance = true;
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
if (needToSetGovernance) {
|
|
202
|
-
const { txHash } = await deployer.sendTransaction({
|
|
203
|
-
to: gseAddress.toString(),
|
|
204
|
-
data: encodeFunctionData({
|
|
205
|
-
abi: GSEArtifact.contractAbi,
|
|
206
|
-
functionName: 'setGovernance',
|
|
207
|
-
args: [
|
|
208
|
-
governanceAddress.toString()
|
|
209
|
-
]
|
|
210
|
-
})
|
|
211
|
-
}, {
|
|
212
|
-
gasLimit: 100_000n
|
|
213
|
-
});
|
|
214
|
-
logger.verbose(`Set governance on GSE in ${txHash}`);
|
|
215
|
-
txHashes.push(txHash);
|
|
216
|
-
}
|
|
217
|
-
logger.verbose(`Waiting for deployments to complete`);
|
|
218
|
-
await deployer.waitForDeployments();
|
|
219
|
-
const coinIssuerAddress = (await deployer.deploy(CoinIssuerArtifact, [
|
|
220
|
-
feeAssetAddress.toString(),
|
|
221
|
-
2n * 10n ** 17n,
|
|
222
|
-
l1Client.account.address
|
|
223
|
-
], {
|
|
224
|
-
gasLimit: 1_000_000n,
|
|
225
|
-
noSimulation: true
|
|
226
|
-
})).address;
|
|
227
|
-
logger.verbose(`Deployed CoinIssuer at ${coinIssuerAddress}`);
|
|
228
|
-
logger.verbose(`Waiting for deployments to complete`);
|
|
229
|
-
await deployer.waitForDeployments();
|
|
230
|
-
// Registry ownership will be transferred to governance later, after rollup is added
|
|
231
|
-
let feeAssetHandlerAddress = undefined;
|
|
232
|
-
let stakingAssetHandlerAddress = undefined;
|
|
233
|
-
let zkPassportVerifierAddress = undefined;
|
|
234
|
-
// Only if not on mainnet will we deploy the handlers, and only when we control the token
|
|
235
|
-
if (l1Client.chain.id !== 1 && !args.existingTokenAddress) {
|
|
236
|
-
/* -------------------------------------------------------------------------- */ /* CHEAT CODES START HERE */ /* -------------------------------------------------------------------------- */ const deployedFeeAssetHandler = await deployer.deploy(FeeAssetHandlerArtifact, [
|
|
237
|
-
l1Client.account.address,
|
|
238
|
-
feeAssetAddress.toString(),
|
|
239
|
-
BigInt(1000n * 10n ** 18n)
|
|
240
|
-
]);
|
|
241
|
-
feeAssetHandlerAddress = deployedFeeAssetHandler.address;
|
|
242
|
-
logger.verbose(`Deployed FeeAssetHandler at ${feeAssetHandlerAddress}`);
|
|
243
|
-
// Only add as minter if this is a new deployment (not reusing existing handler from failed previous run)
|
|
244
|
-
if (!deployedFeeAssetHandler.existed) {
|
|
245
|
-
const { txHash } = await deployer.sendTransaction({
|
|
246
|
-
to: feeAssetAddress.toString(),
|
|
247
|
-
data: encodeFunctionData({
|
|
248
|
-
abi: FeeAssetArtifact.contractAbi,
|
|
249
|
-
functionName: 'addMinter',
|
|
250
|
-
args: [
|
|
251
|
-
feeAssetHandlerAddress.toString()
|
|
252
|
-
]
|
|
253
|
-
})
|
|
254
|
-
});
|
|
255
|
-
logger.verbose(`Added fee asset handler ${feeAssetHandlerAddress} as minter on fee asset in ${txHash}`);
|
|
256
|
-
txHashes.push(txHash);
|
|
257
|
-
}
|
|
258
|
-
// Only if on sepolia will we deploy the staking asset handler
|
|
259
|
-
// Should not be deployed to devnet since it would cause caos with sequencers there etc.
|
|
260
|
-
if ([
|
|
261
|
-
11155111,
|
|
262
|
-
foundry.id
|
|
263
|
-
].includes(l1Client.chain.id)) {
|
|
264
|
-
const AMIN = EthAddress.fromString('0x3b218d0F26d15B36C715cB06c949210a0d630637');
|
|
265
|
-
zkPassportVerifierAddress = await getZkPassportVerifierAddress(deployer, args);
|
|
266
|
-
const [domain, scope] = getZkPassportScopes(args);
|
|
267
|
-
const stakingAssetHandlerDeployArgs = {
|
|
268
|
-
owner: l1Client.account.address,
|
|
269
|
-
stakingAsset: stakingAssetAddress.toString(),
|
|
270
|
-
registry: registryAddress.toString(),
|
|
271
|
-
withdrawer: AMIN.toString(),
|
|
272
|
-
validatorsToFlush: 16n,
|
|
273
|
-
mintInterval: BigInt(60 * 60 * 24),
|
|
274
|
-
depositsPerMint: BigInt(10),
|
|
275
|
-
depositMerkleRoot: '0x0000000000000000000000000000000000000000000000000000000000000000',
|
|
276
|
-
zkPassportVerifier: zkPassportVerifierAddress.toString(),
|
|
277
|
-
unhinged: [
|
|
278
|
-
AMIN.toString()
|
|
279
|
-
],
|
|
280
|
-
// Scopes
|
|
281
|
-
domain: domain,
|
|
282
|
-
scope: scope,
|
|
283
|
-
// Skip checks
|
|
284
|
-
skipBindCheck: args.zkPassportArgs?.mockZkPassportVerifier ?? false,
|
|
285
|
-
skipMerkleCheck: true
|
|
286
|
-
};
|
|
287
|
-
stakingAssetHandlerAddress = (await deployer.deploy(StakingAssetHandlerArtifact, [
|
|
288
|
-
stakingAssetHandlerDeployArgs
|
|
289
|
-
])).address;
|
|
290
|
-
logger.verbose(`Deployed StakingAssetHandler at ${stakingAssetHandlerAddress}`);
|
|
291
|
-
const { txHash: stakingMinterTxHash } = await deployer.sendTransaction({
|
|
292
|
-
to: stakingAssetAddress.toString(),
|
|
293
|
-
data: encodeFunctionData({
|
|
294
|
-
abi: StakingAssetArtifact.contractAbi,
|
|
295
|
-
functionName: 'addMinter',
|
|
296
|
-
args: [
|
|
297
|
-
stakingAssetHandlerAddress.toString()
|
|
298
|
-
]
|
|
299
|
-
})
|
|
300
|
-
});
|
|
301
|
-
logger.verbose(`Added staking asset handler ${stakingAssetHandlerAddress} as minter on staking asset in ${stakingMinterTxHash}`);
|
|
302
|
-
txHashes.push(stakingMinterTxHash);
|
|
303
|
-
}
|
|
304
|
-
}
|
|
305
|
-
/* -------------------------------------------------------------------------- */ /* CHEAT CODES END HERE */ /* -------------------------------------------------------------------------- */ logger.verbose(`Waiting for deployments to complete`);
|
|
306
|
-
await deployer.waitForDeployments();
|
|
307
|
-
await Promise.all(txHashes.map((txHash)=>l1Client.waitForTransactionReceipt({
|
|
308
|
-
hash: txHash
|
|
309
|
-
})));
|
|
310
|
-
logger.verbose(`Deployed shared contracts`);
|
|
311
|
-
const registry = new RegistryContract(l1Client, registryAddress);
|
|
312
|
-
/* -------------------------------------------------------------------------- */ /* FUND REWARD DISTRIBUTOR START */ /* -------------------------------------------------------------------------- */ const rewardDistributorAddress = await registry.getRewardDistributor();
|
|
313
|
-
if (!args.existingTokenAddress) {
|
|
314
|
-
const checkpointReward = getRewardConfig(networkName).checkpointReward;
|
|
315
|
-
const funding = checkpointReward * 200000n;
|
|
316
|
-
const { txHash: fundRewardDistributorTxHash } = await deployer.sendTransaction({
|
|
317
|
-
to: feeAssetAddress.toString(),
|
|
318
|
-
data: encodeFunctionData({
|
|
319
|
-
abi: FeeAssetArtifact.contractAbi,
|
|
320
|
-
functionName: 'mint',
|
|
321
|
-
args: [
|
|
322
|
-
rewardDistributorAddress.toString(),
|
|
323
|
-
funding
|
|
324
|
-
]
|
|
325
|
-
})
|
|
326
|
-
});
|
|
327
|
-
logger.verbose(`Funded reward distributor with ${funding} fee asset in ${fundRewardDistributorTxHash}`);
|
|
328
|
-
} else {
|
|
329
|
-
logger.verbose(`Skipping reward distributor funding as existing token is provided`);
|
|
330
|
-
}
|
|
331
|
-
/* -------------------------------------------------------------------------- */ /* FUND REWARD DISTRIBUTOR STOP */ /* -------------------------------------------------------------------------- */ return {
|
|
332
|
-
feeAssetAddress,
|
|
333
|
-
feeAssetHandlerAddress,
|
|
334
|
-
stakingAssetAddress,
|
|
335
|
-
stakingAssetHandlerAddress,
|
|
336
|
-
zkPassportVerifierAddress,
|
|
337
|
-
registryAddress,
|
|
338
|
-
gseAddress,
|
|
339
|
-
governanceAddress,
|
|
340
|
-
governanceProposerAddress,
|
|
341
|
-
coinIssuerAddress,
|
|
342
|
-
rewardDistributorAddress: await registry.getRewardDistributor()
|
|
343
|
-
};
|
|
344
|
-
};
|
|
345
|
-
const getZkPassportVerifierAddress = async (deployer, args)=>{
|
|
346
|
-
if (args.zkPassportArgs?.mockZkPassportVerifier) {
|
|
347
|
-
return (await deployer.deploy(mockVerifiers.mockZkPassportVerifier)).address;
|
|
348
|
-
}
|
|
349
|
-
return ZK_PASSPORT_VERIFIER_ADDRESS;
|
|
350
|
-
};
|
|
351
|
-
/**
|
|
352
|
-
* Get the zk passport scopes - default to testnet values if not provided
|
|
353
|
-
* @param args - The deployment arguments
|
|
354
|
-
* @returns The zk passport scopes
|
|
355
|
-
*/ const getZkPassportScopes = (args)=>{
|
|
356
|
-
const domain = args.zkPassportArgs?.zkPassportDomain ?? ZK_PASSPORT_DOMAIN;
|
|
357
|
-
const scope = args.zkPassportArgs?.zkPassportScope ?? ZK_PASSPORT_SCOPE;
|
|
358
|
-
return [
|
|
359
|
-
domain,
|
|
360
|
-
scope
|
|
361
|
-
];
|
|
362
|
-
};
|
|
363
|
-
/**
|
|
364
|
-
* Generates verification records for a deployed rollup and its associated contracts (Inbox, Outbox, Slasher, etc).
|
|
365
|
-
* @param rollup - The deployed rollup contract.
|
|
366
|
-
* @param deployer - The L1 deployer instance.
|
|
367
|
-
* @param args - The deployment arguments used for the rollup.
|
|
368
|
-
* @param addresses - The L1 contract addresses.
|
|
369
|
-
* @param extendedClient - The extended viem wallet client.
|
|
370
|
-
* @param logger - The logger.
|
|
371
|
-
*/ async function generateRollupVerificationRecords(rollup, deployer, args, addresses, extendedClient, logger) {
|
|
372
|
-
try {
|
|
373
|
-
// Add Inbox / Outbox verification records (constructor args are created inside RollupCore)
|
|
374
|
-
const rollupAddr = rollup.address;
|
|
375
|
-
const rollupAddresses = await rollup.getRollupAddresses();
|
|
376
|
-
const inboxAddr = rollupAddresses.inboxAddress.toString();
|
|
377
|
-
const outboxAddr = rollupAddresses.outboxAddress.toString();
|
|
378
|
-
const feeAsset = rollupAddresses.feeJuiceAddress.toString();
|
|
379
|
-
const version = await rollup.getVersion();
|
|
380
|
-
const inboxCtor = encodeAbiParameters([
|
|
381
|
-
{
|
|
382
|
-
type: 'address'
|
|
383
|
-
},
|
|
384
|
-
{
|
|
385
|
-
type: 'address'
|
|
386
|
-
},
|
|
387
|
-
{
|
|
388
|
-
type: 'uint256'
|
|
389
|
-
},
|
|
390
|
-
{
|
|
391
|
-
type: 'uint256'
|
|
392
|
-
}
|
|
393
|
-
], [
|
|
394
|
-
rollupAddr,
|
|
395
|
-
feeAsset,
|
|
396
|
-
version,
|
|
397
|
-
BigInt(L1_TO_L2_MSG_SUBTREE_HEIGHT)
|
|
398
|
-
]);
|
|
399
|
-
const outboxCtor = encodeAbiParameters([
|
|
400
|
-
{
|
|
401
|
-
type: 'address'
|
|
402
|
-
},
|
|
403
|
-
{
|
|
404
|
-
type: 'uint256'
|
|
405
|
-
}
|
|
406
|
-
], [
|
|
407
|
-
rollupAddr,
|
|
408
|
-
version
|
|
409
|
-
]);
|
|
410
|
-
deployer.verificationRecords.push({
|
|
411
|
-
name: 'Inbox',
|
|
412
|
-
address: inboxAddr,
|
|
413
|
-
constructorArgsHex: inboxCtor,
|
|
414
|
-
libraries: []
|
|
415
|
-
}, {
|
|
416
|
-
name: 'Outbox',
|
|
417
|
-
address: outboxAddr,
|
|
418
|
-
constructorArgsHex: outboxCtor,
|
|
419
|
-
libraries: []
|
|
420
|
-
});
|
|
421
|
-
// Include Slasher and SlashingProposer (if deployed) in verification data
|
|
422
|
-
try {
|
|
423
|
-
const slasherAddrHex = await rollup.getSlasherAddress();
|
|
424
|
-
const slasherAddr = EthAddress.fromString(slasherAddrHex);
|
|
425
|
-
if (!slasherAddr.isZero()) {
|
|
426
|
-
// Slasher constructor: (address _vetoer, address _governance)
|
|
427
|
-
const slasherCtor = encodeAbiParameters([
|
|
428
|
-
{
|
|
429
|
-
type: 'address'
|
|
430
|
-
},
|
|
431
|
-
{
|
|
432
|
-
type: 'address'
|
|
433
|
-
}
|
|
434
|
-
], [
|
|
435
|
-
args.slashingVetoer.toString(),
|
|
436
|
-
extendedClient.account.address
|
|
437
|
-
]);
|
|
438
|
-
deployer.verificationRecords.push({
|
|
439
|
-
name: 'Slasher',
|
|
440
|
-
address: slasherAddr.toString(),
|
|
441
|
-
constructorArgsHex: slasherCtor,
|
|
442
|
-
libraries: []
|
|
443
|
-
});
|
|
444
|
-
// Proposer address is stored in Slasher.PROPOSER()
|
|
445
|
-
const proposerAddr = (await rollup.getSlashingProposerAddress()).toString();
|
|
446
|
-
// Compute constructor args matching deployment path in RollupCore
|
|
447
|
-
const computedRoundSize = BigInt(args.slashingRoundSizeInEpochs * args.aztecEpochDuration);
|
|
448
|
-
const computedQuorum = BigInt(args.slashingQuorum ?? args.slashingRoundSizeInEpochs * args.aztecEpochDuration / 2 + 1);
|
|
449
|
-
const lifetimeInRounds = BigInt(args.slashingLifetimeInRounds);
|
|
450
|
-
const executionDelayInRounds = BigInt(args.slashingExecutionDelayInRounds);
|
|
451
|
-
if (args.slasherFlavor === 'tally') {
|
|
452
|
-
const slashAmounts = [
|
|
453
|
-
args.slashAmountSmall,
|
|
454
|
-
args.slashAmountMedium,
|
|
455
|
-
args.slashAmountLarge
|
|
456
|
-
];
|
|
457
|
-
const committeeSize = BigInt(args.aztecTargetCommitteeSize);
|
|
458
|
-
const epochDuration = BigInt(args.aztecEpochDuration);
|
|
459
|
-
const slashOffsetInRounds = BigInt(args.slashingOffsetInRounds);
|
|
460
|
-
const proposerCtor = encodeAbiParameters([
|
|
461
|
-
{
|
|
462
|
-
type: 'address'
|
|
463
|
-
},
|
|
464
|
-
{
|
|
465
|
-
type: 'address'
|
|
466
|
-
},
|
|
467
|
-
{
|
|
468
|
-
type: 'uint256'
|
|
469
|
-
},
|
|
470
|
-
{
|
|
471
|
-
type: 'uint256'
|
|
472
|
-
},
|
|
473
|
-
{
|
|
474
|
-
type: 'uint256'
|
|
475
|
-
},
|
|
476
|
-
{
|
|
477
|
-
type: 'uint256'
|
|
478
|
-
},
|
|
479
|
-
{
|
|
480
|
-
type: 'uint256[3]'
|
|
481
|
-
},
|
|
482
|
-
{
|
|
483
|
-
type: 'uint256'
|
|
484
|
-
},
|
|
485
|
-
{
|
|
486
|
-
type: 'uint256'
|
|
487
|
-
},
|
|
488
|
-
{
|
|
489
|
-
type: 'uint256'
|
|
490
|
-
}
|
|
491
|
-
], [
|
|
492
|
-
rollup.address,
|
|
493
|
-
slasherAddr.toString(),
|
|
494
|
-
computedQuorum,
|
|
495
|
-
computedRoundSize,
|
|
496
|
-
lifetimeInRounds,
|
|
497
|
-
executionDelayInRounds,
|
|
498
|
-
slashAmounts,
|
|
499
|
-
committeeSize,
|
|
500
|
-
epochDuration,
|
|
501
|
-
slashOffsetInRounds
|
|
502
|
-
]);
|
|
503
|
-
deployer.verificationRecords.push({
|
|
504
|
-
name: 'TallySlashingProposer',
|
|
505
|
-
address: proposerAddr,
|
|
506
|
-
constructorArgsHex: proposerCtor,
|
|
507
|
-
libraries: []
|
|
508
|
-
});
|
|
509
|
-
} else if (args.slasherFlavor === 'empire') {
|
|
510
|
-
const proposerCtor = encodeAbiParameters([
|
|
511
|
-
{
|
|
512
|
-
type: 'address'
|
|
513
|
-
},
|
|
514
|
-
{
|
|
515
|
-
type: 'address'
|
|
516
|
-
},
|
|
517
|
-
{
|
|
518
|
-
type: 'uint256'
|
|
519
|
-
},
|
|
520
|
-
{
|
|
521
|
-
type: 'uint256'
|
|
522
|
-
},
|
|
523
|
-
{
|
|
524
|
-
type: 'uint256'
|
|
525
|
-
},
|
|
526
|
-
{
|
|
527
|
-
type: 'uint256'
|
|
528
|
-
}
|
|
529
|
-
], [
|
|
530
|
-
rollup.address,
|
|
531
|
-
slasherAddr.toString(),
|
|
532
|
-
computedQuorum,
|
|
533
|
-
computedRoundSize,
|
|
534
|
-
lifetimeInRounds,
|
|
535
|
-
executionDelayInRounds
|
|
536
|
-
]);
|
|
537
|
-
deployer.verificationRecords.push({
|
|
538
|
-
name: 'EmpireSlashingProposer',
|
|
539
|
-
address: proposerAddr,
|
|
540
|
-
constructorArgsHex: proposerCtor,
|
|
541
|
-
libraries: []
|
|
542
|
-
});
|
|
543
|
-
}
|
|
544
|
-
}
|
|
545
|
-
} catch (e) {
|
|
546
|
-
logger.warn(`Failed to add Slasher/Proposer verification records: ${String(e)}`);
|
|
547
|
-
}
|
|
548
|
-
} catch (e) {
|
|
549
|
-
throw new Error(`Failed to generate rollup verification records: ${String(e)}`);
|
|
550
|
-
}
|
|
551
|
-
}
|
|
552
|
-
/**
|
|
553
|
-
* Writes verification records to a JSON file for later forge verify.
|
|
554
|
-
* @param deployer - The L1 deployer containing verification records.
|
|
555
|
-
* @param outputDirectory - The directory to write the verification file to.
|
|
556
|
-
* @param chainId - The chain ID.
|
|
557
|
-
* @param filenameSuffix - Optional suffix for the filename (e.g., 'upgrade').
|
|
558
|
-
* @param logger - The logger.
|
|
559
|
-
*/ async function writeVerificationJson(deployer, outputDirectory, chainId, filenameSuffix = '', logger) {
|
|
560
|
-
try {
|
|
561
|
-
const date = new Date();
|
|
562
|
-
const formattedDate = date.toISOString().slice(2, 19).replace(/[-T:]/g, '');
|
|
563
|
-
// Ensure the verification output directory exists
|
|
564
|
-
await fs.promises.mkdir(outputDirectory, {
|
|
565
|
-
recursive: true
|
|
566
|
-
});
|
|
567
|
-
const suffix = filenameSuffix ? `-${filenameSuffix}` : '';
|
|
568
|
-
const verificationOutputPath = `${outputDirectory}/l1-verify${suffix}-${chainId}-${formattedDate.slice(0, 6)}-${formattedDate.slice(6)}.json`;
|
|
569
|
-
const networkName = getActiveNetworkName();
|
|
570
|
-
const verificationData = {
|
|
571
|
-
chainId: chainId,
|
|
572
|
-
network: networkName,
|
|
573
|
-
records: deployer.verificationRecords
|
|
574
|
-
};
|
|
575
|
-
await fs.promises.writeFile(verificationOutputPath, JSON.stringify(verificationData, null, 2));
|
|
576
|
-
logger.info(`Wrote L1 verification data to ${verificationOutputPath}`);
|
|
577
|
-
} catch (e) {
|
|
578
|
-
logger.warn(`Failed to write L1 verification data file: ${String(e)}`);
|
|
579
|
-
}
|
|
580
|
-
}
|
|
581
|
-
/**
|
|
582
|
-
* Deploys a new rollup, using the existing canonical version to derive certain values (addresses of assets etc).
|
|
583
|
-
* @param clients - The L1 clients.
|
|
584
|
-
* @param args - The deployment arguments.
|
|
585
|
-
* @param registryAddress - The address of the registry.
|
|
586
|
-
* @param logger - The logger.
|
|
587
|
-
* @param txUtilsConfig - The L1 tx utils config.
|
|
588
|
-
* @param createVerificationJson - Optional path to write verification data for forge verify.
|
|
589
|
-
*/ export const deployRollupForUpgrade = async (extendedClient, args, registryAddress, logger, txUtilsConfig, createVerificationJson = false)=>{
|
|
590
|
-
const deployer = new L1Deployer(extendedClient, args.salt, undefined, args.acceleratedTestDeployments, logger, txUtilsConfig, !!createVerificationJson);
|
|
591
|
-
const addresses = await RegistryContract.collectAddresses(extendedClient, registryAddress, 'canonical');
|
|
592
|
-
const { rollup, slashFactoryAddress } = await deployRollup(extendedClient, deployer, args, addresses, logger);
|
|
593
|
-
await deployer.waitForDeployments();
|
|
594
|
-
// Write verification data (constructor args + linked libraries) to file for later forge verify
|
|
595
|
-
if (createVerificationJson) {
|
|
596
|
-
await generateRollupVerificationRecords(rollup, deployer, args, addresses, extendedClient, logger);
|
|
597
|
-
await writeVerificationJson(deployer, createVerificationJson, extendedClient.chain.id, 'upgrade', logger);
|
|
598
|
-
}
|
|
599
|
-
return {
|
|
600
|
-
rollup,
|
|
601
|
-
slashFactoryAddress
|
|
602
|
-
};
|
|
603
|
-
};
|
|
604
|
-
export const deploySlashFactory = async (deployer, rollupAddress, logger)=>{
|
|
605
|
-
const slashFactoryAddress = (await deployer.deploy(SlashFactoryArtifact, [
|
|
606
|
-
rollupAddress
|
|
607
|
-
])).address;
|
|
608
|
-
logger.verbose(`Deployed SlashFactory at ${slashFactoryAddress}`);
|
|
609
|
-
return slashFactoryAddress;
|
|
610
|
-
};
|
|
611
|
-
export const deployUpgradePayload = async (deployer, addresses)=>{
|
|
612
|
-
const payloadAddress = (await deployer.deploy(RegisterNewRollupVersionPayloadArtifact, [
|
|
613
|
-
addresses.registryAddress.toString(),
|
|
614
|
-
addresses.rollupAddress.toString()
|
|
615
|
-
])).address;
|
|
616
|
-
return payloadAddress;
|
|
617
|
-
};
|
|
618
|
-
function slasherFlavorToSolidityEnum(flavor) {
|
|
619
|
-
switch(flavor){
|
|
620
|
-
case 'none':
|
|
621
|
-
return SlashingProposerType.None.valueOf();
|
|
622
|
-
case 'tally':
|
|
623
|
-
return SlashingProposerType.Tally.valueOf();
|
|
624
|
-
case 'empire':
|
|
625
|
-
return SlashingProposerType.Empire.valueOf();
|
|
626
|
-
default:
|
|
627
|
-
{
|
|
628
|
-
const _ = flavor;
|
|
629
|
-
throw new Error(`Unexpected slasher flavor ${flavor}`);
|
|
630
|
-
}
|
|
631
|
-
}
|
|
632
|
-
}
|
|
633
|
-
/**
 * Deploys a new rollup contract, funds and initializes the fee juice portal, and initializes the validator set.
 *
 * Also deploys the proof verifier (real or mock) and the SlashFactory, registers the rollup in the
 * Registry and GSE when the deployer account owns them, and finally transfers rollup ownership to
 * Governance. Transaction hashes are collected and awaited at the end.
 *
 * @param extendedClient - Extended L1 wallet client used for reads and receipt waits.
 * @param deployer - L1Deployer used to deploy contracts and send transactions.
 * @param args - Deployment arguments (slot/epoch durations, slashing config, initial validators, ...).
 * @param addresses - Shared contract addresses from deploySharedContracts; gseAddress is required.
 * @param logger - Logger for progress output.
 * @returns The RollupContract wrapper and the deployed SlashFactory address.
 */ export const deployRollup = async (extendedClient, deployer, args, addresses, logger)=>{
    if (!addresses.gseAddress) {
        throw new Error('GSE address is required when deploying');
    }
    const networkName = getActiveNetworkName();
    logger.info(`Deploying rollup using network configuration: ${networkName}`);
    // Hashes of non-deployment transactions sent along the way; awaited before returning.
    const txHashes = [];
    let epochProofVerifier = EthAddress.ZERO;
    if (args.realVerifier) {
        epochProofVerifier = (await deployer.deploy(l1ArtifactsVerifiers.honkVerifier)).address;
        logger.verbose(`Rollup will use the real verifier at ${epochProofVerifier}`);
    } else {
        epochProofVerifier = (await deployer.deploy(mockVerifiers.mockVerifier)).address;
        logger.verbose(`Rollup will use the mock verifier at ${epochProofVerifier}`);
    }
    const rewardConfig = {
        ...getRewardConfig(networkName),
        rewardDistributor: addresses.rewardDistributorAddress.toString()
    };
    // Config passed to the Rollup constructor. Most numeric values are widened to bigint
    // to match the Solidity ABI encoding.
    const rollupConfigArgs = {
        aztecSlotDuration: BigInt(args.aztecSlotDuration),
        aztecEpochDuration: BigInt(args.aztecEpochDuration),
        targetCommitteeSize: BigInt(args.aztecTargetCommitteeSize),
        lagInEpochsForValidatorSet: BigInt(args.lagInEpochsForValidatorSet),
        lagInEpochsForRandao: BigInt(args.lagInEpochsForRandao),
        aztecProofSubmissionEpochs: BigInt(args.aztecProofSubmissionEpochs),
        // Default quorum is a strict majority of the round size (in slots).
        slashingQuorum: BigInt(args.slashingQuorum ?? args.slashingRoundSizeInEpochs * args.aztecEpochDuration / 2 + 1),
        slashingRoundSize: BigInt(args.slashingRoundSizeInEpochs * args.aztecEpochDuration),
        slashingLifetimeInRounds: BigInt(args.slashingLifetimeInRounds),
        slashingExecutionDelayInRounds: BigInt(args.slashingExecutionDelayInRounds),
        slashingVetoer: args.slashingVetoer.toString(),
        manaTarget: args.manaTarget,
        provingCostPerMana: args.provingCostPerMana,
        rewardConfig: rewardConfig,
        // Placeholder; overwritten below from a hash of the full config.
        version: 0,
        rewardBoostConfig: getRewardBoostConfig(),
        stakingQueueConfig: getEntryQueueConfig(networkName),
        exitDelaySeconds: BigInt(args.exitDelaySeconds),
        slasherFlavor: slasherFlavorToSolidityEnum(args.slasherFlavor),
        slashingOffsetInRounds: BigInt(args.slashingOffsetInRounds),
        slashAmounts: [
            args.slashAmountSmall,
            args.slashAmountMedium,
            args.slashAmountLarge
        ],
        localEjectionThreshold: args.localEjectionThreshold,
        slashingDisableDuration: BigInt(args.slashingDisableDuration ?? 0n),
        earliestRewardsClaimableTimestamp: 0n
    };
    const genesisStateArgs = {
        vkTreeRoot: args.vkTreeRoot.toString(),
        protocolContractsHash: args.protocolContractsHash.toString(),
        genesisArchiveRoot: args.genesisArchiveRoot.toString()
    };
    // Until there is an actual chain-id for the version, we will just draw a random value.
    // TODO(https://linear.app/aztec-labs/issue/TMNT-139/version-at-deployment)
    // The version is derived from the first 4 bytes of the keccak hash of the full config,
    // so identical configs yield identical versions (keeps deployments deterministic).
    rollupConfigArgs.version = Buffer.from(keccak256String(jsonStringify({
        rollupConfigArgs,
        genesisStateArgs
    }))).readUint32BE(0);
    logger.verbose(`Rollup config args`, rollupConfigArgs);
    const rollupArgs = [
        addresses.feeJuiceAddress.toString(),
        addresses.stakingAssetAddress.toString(),
        addresses.gseAddress.toString(),
        epochProofVerifier.toString(),
        extendedClient.account.address,
        genesisStateArgs,
        rollupConfigArgs
    ];
    const { address: rollupAddress, existed: rollupExisted } = await deployer.deploy(RollupArtifact, rollupArgs, {
        gasLimit: 15_000_000n
    });
    logger.verbose(`Deployed Rollup at ${rollupAddress}, already existed: ${rollupExisted}`, rollupConfigArgs);
    const rollupContract = new RollupContract(extendedClient, rollupAddress);
    await deployer.waitForDeployments();
    logger.verbose(`All core contracts have been deployed`);
    if (args.feeJuicePortalInitialBalance && args.feeJuicePortalInitialBalance > 0n) {
        // Skip funding when using an external token, as we likely don't have mint permissions
        if (!('existingTokenAddress' in args) || !args.existingTokenAddress) {
            const feeJuicePortalAddress = await rollupContract.getFeeJuicePortal();
            // In fast mode, use the L1TxUtils to send transactions with nonce management
            const { txHash: mintTxHash } = await deployer.sendTransaction({
                to: addresses.feeJuiceAddress.toString(),
                data: encodeFunctionData({
                    abi: FeeAssetArtifact.contractAbi,
                    functionName: 'mint',
                    args: [
                        feeJuicePortalAddress.toString(),
                        args.feeJuicePortalInitialBalance
                    ]
                })
            });
            logger.verbose(`Funding fee juice portal with ${args.feeJuicePortalInitialBalance} fee juice in ${mintTxHash} (accelerated test deployments)`);
            txHashes.push(mintTxHash);
        } else {
            logger.verbose('Skipping fee juice portal funding due to external token usage');
        }
    }
    const slashFactoryAddress = (await deployer.deploy(SlashFactoryArtifact, [
        rollupAddress.toString()
    ])).address;
    logger.verbose(`Deployed SlashFactory at ${slashFactoryAddress}`);
    // We need to call a function on the registry to set the various contract addresses.
    const registryContract = getContract({
        address: getAddress(addresses.registryAddress.toString()),
        abi: RegistryArtifact.contractAbi,
        client: extendedClient
    });
    // Only if we are the owner will we be sending these transactions
    // NOTE(review): relies on viem returning a checksummed address from owner() — verify.
    if (await registryContract.read.owner() === getAddress(extendedClient.account.address)) {
        const version = await rollupContract.getVersion();
        try {
            const retrievedRollupAddress = await registryContract.read.getRollup([
                version
            ]);
            logger.verbose(`Rollup ${retrievedRollupAddress} already exists in registry`);
        } catch {
            // getRollup is presumed to revert when no rollup exists for this version,
            // in which case we register ours.
            const { txHash: addRollupTxHash } = await deployer.sendTransaction({
                to: addresses.registryAddress.toString(),
                data: encodeFunctionData({
                    abi: RegistryArtifact.contractAbi,
                    functionName: 'addRollup',
                    args: [
                        getAddress(rollupContract.address)
                    ]
                })
            });
            logger.verbose(`Adding rollup ${rollupContract.address} to registry ${addresses.registryAddress} in tx ${addRollupTxHash}`);
            txHashes.push(addRollupTxHash);
        }
    } else {
        logger.verbose(`Not the owner of the registry, skipping rollup addition`);
    }
    // We need to call a function on the registry to set the various contract addresses.
    const gseContract = getContract({
        address: getAddress(addresses.gseAddress.toString()),
        abi: GSEArtifact.contractAbi,
        client: extendedClient
    });
    if (await gseContract.read.owner() === getAddress(extendedClient.account.address)) {
        if (!await gseContract.read.isRollupRegistered([
            rollupContract.address
        ])) {
            const { txHash: addRollupTxHash } = await deployer.sendTransaction({
                to: addresses.gseAddress.toString(),
                data: encodeFunctionData({
                    abi: GSEArtifact.contractAbi,
                    functionName: 'addRollup',
                    args: [
                        getAddress(rollupContract.address)
                    ]
                })
            });
            logger.verbose(`Adding rollup ${rollupContract.address} to GSE ${addresses.gseAddress} in tx ${addRollupTxHash}`);
            // wait for this tx to land in case we have to register initialValidators
            await extendedClient.waitForTransactionReceipt({
                hash: addRollupTxHash
            });
        } else {
            logger.verbose(`Rollup ${rollupContract.address} is already registered in GSE ${addresses.gseAddress}`);
        }
    } else {
        logger.verbose(`Not the owner of the gse, skipping rollup addition`);
    }
    // Counts are bigints (compared against 0n below).
    const activeAttestorCount = await rollupContract.getActiveAttesterCount();
    const queuedAttestorCount = await rollupContract.getEntryQueueLength();
    logger.info(`Rollup has ${activeAttestorCount} active attestors and ${queuedAttestorCount} queued attestors`);
    // Only seed validators into a completely fresh rollup (idempotent re-deploys).
    const shouldAddValidators = activeAttestorCount === 0n && queuedAttestorCount === 0n;
    if (args.initialValidators && shouldAddValidators && await gseContract.read.isRollupRegistered([
        rollupContract.address
    ])) {
        await addMultipleValidators(extendedClient, deployer, addresses.gseAddress.toString(), rollupAddress.toString(), addresses.stakingAssetAddress.toString(), args.initialValidators, args.acceleratedTestDeployments, logger);
    }
    // If the owner is not the Governance contract, transfer ownership to the Governance contract
    logger.verbose(addresses.governanceAddress.toString());
    if (getAddress(await rollupContract.getOwner()) !== getAddress(addresses.governanceAddress.toString())) {
        // TODO(md): add send transaction to the deployer such that we do not need to manage tx hashes here
        // NOTE(review): encodes via RegistryArtifact's ABI for a call to the rollup —
        // works only because transferOwnership is a shared Ownable selector; confirm intended.
        const { txHash: transferOwnershipTxHash } = await deployer.sendTransaction({
            to: rollupContract.address,
            data: encodeFunctionData({
                abi: RegistryArtifact.contractAbi,
                functionName: 'transferOwnership',
                args: [
                    getAddress(addresses.governanceAddress.toString())
                ]
            })
        });
        logger.verbose(`Transferring the ownership of the rollup contract at ${rollupContract.address} to the Governance ${addresses.governanceAddress} in tx ${transferOwnershipTxHash}`);
        txHashes.push(transferOwnershipTxHash);
    }
    // Wait for all deployments and all side transactions issued above.
    await deployer.waitForDeployments();
    await Promise.all(txHashes.map((txHash)=>extendedClient.waitForTransactionReceipt({
        hash: txHash
    })));
    logger.verbose(`Rollup deployed`);
    return {
        rollup: rollupContract,
        slashFactoryAddress
    };
};
|
|
836
|
-
/**
 * Hands control of the core protocol contracts over to governance:
 * - Registry and GSE ownership -> Governance.
 * - Fee asset ownership -> CoinIssuer (two-step: transfer + accept), unless an external token is used.
 * - CoinIssuer ownership -> a freshly deployed DateGatedRelayer controlled by Governance.
 *
 * Fix over previous revision: all owner() comparisons are normalized with getAddress() on BOTH
 * sides. viem returns EIP-55 checksummed addresses while `.toString()` on our address types may
 * not be checksummed, so raw string comparison could mismatch on casing alone — causing redundant
 * transfer transactions (or skipped ones) on re-deploys.
 *
 * @param extendedClient - Extended L1 wallet client used for reads and receipt waits.
 * @param deployer - L1Deployer used to deploy contracts and send transactions.
 * @param registryAddress - Registry contract address.
 * @param gseAddress - GSE contract address.
 * @param coinIssuerAddress - CoinIssuer contract address.
 * @param feeAssetAddress - Fee asset (token) contract address.
 * @param governanceAddress - Governance contract address receiving ownership.
 * @param logger - Logger for progress output.
 * @param acceleratedTestDeployments - When true, skip ownership reads and always send transfers.
 * @param useExternalToken - When true, skip fee-asset ownership transfer (we don't own it).
 * @returns The deployed DateGatedRelayer address.
 */ export const handoverToGovernance = async (extendedClient, deployer, registryAddress, gseAddress, coinIssuerAddress, feeAssetAddress, governanceAddress, logger, acceleratedTestDeployments, useExternalToken = false)=>{
    // We need to call a function on the registry to set the various contract addresses.
    const registryContract = getContract({
        address: getAddress(registryAddress.toString()),
        abi: RegistryArtifact.contractAbi,
        client: extendedClient
    });
    const gseContract = getContract({
        address: getAddress(gseAddress.toString()),
        abi: GSEArtifact.contractAbi,
        client: extendedClient
    });
    const coinIssuerContract = getContract({
        address: getAddress(coinIssuerAddress.toString()),
        abi: CoinIssuerArtifact.contractAbi,
        client: extendedClient
    });
    const feeAsset = getContract({
        address: getAddress(feeAssetAddress.toString()),
        abi: FeeAssetArtifact.contractAbi,
        client: extendedClient
    });
    const txHashes = [];
    // If the owner is not the Governance contract, transfer ownership to the Governance contract
    if (acceleratedTestDeployments || getAddress(await registryContract.read.owner()) !== getAddress(governanceAddress.toString())) {
        // TODO(md): add send transaction to the deployer such that we do not need to manage tx hashes here
        const { txHash: transferOwnershipTxHash } = await deployer.sendTransaction({
            to: registryAddress.toString(),
            data: encodeFunctionData({
                abi: RegistryArtifact.contractAbi,
                functionName: 'transferOwnership',
                args: [
                    getAddress(governanceAddress.toString())
                ]
            })
        });
        logger.verbose(`Transferring the ownership of the registry contract at ${registryAddress} to the Governance ${governanceAddress} in tx ${transferOwnershipTxHash}`);
        txHashes.push(transferOwnershipTxHash);
    }
    // If the owner is not the Governance contract, transfer ownership to the Governance contract
    if (acceleratedTestDeployments || getAddress(await gseContract.read.owner()) !== getAddress(governanceAddress.toString())) {
        // TODO(md): add send transaction to the deployer such that we do not need to manage tx hashes here
        const { txHash: transferOwnershipTxHash } = await deployer.sendTransaction({
            to: gseContract.address,
            data: encodeFunctionData({
                abi: GSEArtifact.contractAbi,
                functionName: 'transferOwnership',
                args: [
                    getAddress(governanceAddress.toString())
                ]
            })
        });
        logger.verbose(`Transferring the ownership of the gse contract at ${gseAddress} to the Governance ${governanceAddress} in tx ${transferOwnershipTxHash}`);
        txHashes.push(transferOwnershipTxHash);
    }
    if (!useExternalToken && (acceleratedTestDeployments || getAddress(await feeAsset.read.owner()) !== getAddress(coinIssuerAddress.toString()))) {
        const { txHash } = await deployer.sendTransaction({
            to: feeAssetAddress.toString(),
            data: encodeFunctionData({
                abi: FeeAssetArtifact.contractAbi,
                functionName: 'transferOwnership',
                args: [
                    coinIssuerAddress.toString()
                ]
            })
        }, {
            gasLimit: 500_000n
        });
        logger.verbose(`Transfer ownership of fee asset to coin issuer ${coinIssuerAddress} in ${txHash}`);
        txHashes.push(txHash);
        // Two-step Ownable: the coin issuer must explicitly accept the pending transfer.
        const { txHash: acceptTokenOwnershipTxHash } = await deployer.sendTransaction({
            to: coinIssuerAddress.toString(),
            data: encodeFunctionData({
                abi: CoinIssuerArtifact.contractAbi,
                functionName: 'acceptTokenOwnership'
            })
        }, {
            gasLimit: 500_000n
        });
        logger.verbose(`Accept ownership of fee asset in ${acceptTokenOwnershipTxHash}`);
        txHashes.push(acceptTokenOwnershipTxHash);
    } else if (useExternalToken) {
        logger.verbose('Skipping fee asset ownership transfer due to external token usage');
    }
    // Either deploy or at least predict the address of the date gated relayer
    const dateGatedRelayer = await deployer.deploy(DateGatedRelayerArtifact, [
        governanceAddress.toString(),
        1798761600n
    ]);
    // If the owner is not the Governance contract, transfer ownership to the Governance contract
    if (acceleratedTestDeployments || getAddress(await coinIssuerContract.read.owner()) === getAddress(deployer.client.account.address)) {
        const { txHash: transferOwnershipTxHash } = await deployer.sendTransaction({
            to: coinIssuerContract.address,
            data: encodeFunctionData({
                abi: CoinIssuerArtifact.contractAbi,
                functionName: 'transferOwnership',
                args: [
                    getAddress(dateGatedRelayer.address.toString())
                ]
            })
        });
        logger.verbose(`Transferring the ownership of the coin issuer contract at ${coinIssuerAddress} to the DateGatedRelayer ${dateGatedRelayer.address} in tx ${transferOwnershipTxHash}`);
        txHashes.push(transferOwnershipTxHash);
    }
    // Wait for all actions to be mined
    await deployer.waitForDeployments();
    await Promise.all(txHashes.map((txHash)=>extendedClient.waitForTransactionReceipt({
        hash: txHash
    })));
    return {
        dateGatedRelayerAddress: dateGatedRelayer.address
    };
};
|
|
949
|
-
/*
 * Adds multiple validators to the rollup
 *
 * Deploys a MultiAdder helper, mints the required stake to it, enqueues validators in
 * chunks of 16, then flushes the entry queue until it empties or no flushes remain.
 *
 * @param extendedClient - The L1 clients.
 * @param deployer - The L1 deployer.
 * @param gseAddress - The address of the GSE contract.
 * @param rollupAddress - The address of the rollup.
 * @param stakingAssetAddress - The address of the staking asset.
 * @param validators - The validators to initialize.
 * @param acceleratedTestDeployments - Whether to use accelerated test deployments.
 * @param logger - The logger.
 */ export const addMultipleValidators = async (extendedClient, deployer, gseAddress, rollupAddress, stakingAssetAddress, validators, acceleratedTestDeployments, logger)=>{
    const rollup = new RollupContract(extendedClient, rollupAddress);
    const activationThreshold = await rollup.getActivationThreshold();
    if (validators && validators.length > 0) {
        // Check if some of the initial validators are already registered, so we support idempotent deployments
        if (!acceleratedTestDeployments) {
            // Pair each operator with its on-chain status; status 0 means not yet registered.
            const enrichedValidators = await Promise.all(validators.map(async (operator)=>({
                operator,
                status: await rollup.getStatus(operator.attester)
            })));
            const existingValidators = enrichedValidators.filter((v)=>v.status !== 0);
            if (existingValidators.length > 0) {
                logger.warn(`Validators ${existingValidators.map((v)=>v.operator.attester).join(', ')} already exist. Skipping from initialization.`);
            }
            validators = enrichedValidators.filter((v)=>v.status === 0).map((v)=>v.operator);
        }
        if (validators.length === 0) {
            logger.warn('No validators to add. Skipping.');
            return;
        }
        const gseContract = new GSEContract(extendedClient, gseAddress);
        // Helper contract that registers many validators in a single transaction.
        const multiAdder = (await deployer.deploy(MultiAdderArtifact, [
            rollupAddress,
            deployer.client.account.address
        ])).address;
        // Build the on-chain registration tuple for one validator (attester/withdrawer
        // addresses plus the GSE registration fields derived from the BN254 secret key).
        const makeValidatorTuples = async (validator)=>{
            const registrationTuple = await gseContract.makeRegistrationTuple(validator.bn254SecretKey.getValue());
            return {
                attester: getAddress(validator.attester.toString()),
                withdrawer: getAddress(validator.withdrawer.toString()),
                ...registrationTuple
            };
        };
        const validatorsTuples = await Promise.all(validators.map(makeValidatorTuples));
        // Mint tokens, approve them, use cheat code to initialize validator set without setting up the epoch.
        const stakeNeeded = activationThreshold * BigInt(validators.length);
        await deployer.l1TxUtils.sendAndMonitorTransaction({
            to: stakingAssetAddress,
            data: encodeFunctionData({
                abi: StakingAssetArtifact.contractAbi,
                functionName: 'mint',
                args: [
                    multiAdder.toString(),
                    stakeNeeded
                ]
            })
        });
        const entryQueueLengthBefore = await rollup.getEntryQueueLength();
        const validatorCountBefore = await rollup.getActiveAttesterCount();
        logger.info(`Adding ${validators.length} validators to the rollup`);
        const chunkSize = 16;
        // We will add `chunkSize` validators to the queue until we have covered all of our validators.
        // The `chunkSize` needs to be small enough to fit inside a single tx, therefore 16.
        for (const c of chunk(validatorsTuples, chunkSize)){
            await deployer.l1TxUtils.sendAndMonitorTransaction({
                to: multiAdder.toString(),
                data: encodeFunctionData({
                    abi: MultiAdderArtifact.contractAbi,
                    functionName: 'addValidators',
                    args: [
                        c,
                        BigInt(0)
                    ]
                })
            }, {
                gasLimit: 16_000_000n
            });
        }
        // After adding to the queue, we will now try to flush from it.
        // We are explicitly doing this as a second step instead of as part of adding to benefit
        // from the accounting used to speed the process up.
        // As the queue computes the amount of possible flushes in an epoch when told to flush,
        // waiting until we have added all we want allows us to benefit in the case were we added
        // enough to pass the bootstrap set size without needing to wait another epoch.
        // This is useful when we are testing as it speeds up the tests slightly.
        while(true){
            // If the queue is empty, we can break
            if (await rollup.getEntryQueueLength() == 0n) {
                break;
            }
            // If there are no available validator flushes, no need to even try
            if (await rollup.getAvailableValidatorFlushes() == 0n) {
                break;
            }
            // Note that we are flushing at most `chunkSize` at each call
            await deployer.l1TxUtils.sendAndMonitorTransaction({
                to: rollup.address,
                data: encodeFunctionData({
                    abi: RollupArtifact.contractAbi,
                    functionName: 'flushEntryQueue',
                    args: [
                        BigInt(chunkSize)
                    ]
                })
            }, {
                gasLimit: 16_000_000n
            });
        }
        // Sanity check: every requested validator must now be either active or queued.
        const entryQueueLengthAfter = await rollup.getEntryQueueLength();
        const validatorCountAfter = await rollup.getActiveAttesterCount();
        if (entryQueueLengthAfter + validatorCountAfter < entryQueueLengthBefore + validatorCountBefore + BigInt(validators.length)) {
            throw new Error(`Failed to add ${validators.length} validators. Active validators: ${validatorCountBefore} -> ${validatorCountAfter}. Queue: ${entryQueueLengthBefore} -> ${entryQueueLengthAfter}. A likely issue is the bootstrap size.`);
        }
        logger.info(`Added ${validators.length} validators. Active validators: ${validatorCountBefore} -> ${validatorCountAfter}. Queue: ${entryQueueLengthBefore} -> ${entryQueueLengthAfter}`);
    }
};
|
|
1065
|
-
/**
 * Initialize the fee asset handler and make it a minter on the fee asset.
 * @note This function will only be used for testing purposes.
 *
 * @param extendedClient - The L1 clients.
 * @param deployer - The L1 deployer.
 * @param feeAssetAddress - The address of the fee asset.
 * @param logger - The logger.
 * @returns The handler address and the hash of the addMinter transaction.
 */ // eslint-disable-next-line camelcase
export const cheat_initializeFeeAssetHandler = async (extendedClient, deployer, feeAssetAddress, logger)=>{
    // Deploy the handler: (owner, token, mint amount per request).
    const handlerConstructorArgs = [
        extendedClient.account.address,
        feeAssetAddress.toString(),
        BigInt(1e18)
    ];
    const handlerDeployment = await deployer.deploy(FeeAssetHandlerArtifact, handlerConstructorArgs);
    const feeAssetHandlerAddress = handlerDeployment.address;
    logger.verbose(`Deployed FeeAssetHandler at ${feeAssetHandlerAddress}`);
    // Grant the handler minting rights on the fee asset.
    const addMinterCalldata = encodeFunctionData({
        abi: FeeAssetArtifact.contractAbi,
        functionName: 'addMinter',
        args: [
            feeAssetHandlerAddress.toString()
        ]
    });
    const { txHash } = await deployer.sendTransaction({
        to: feeAssetAddress.toString(),
        data: addMinterCalldata
    });
    logger.verbose(`Added fee asset handler ${feeAssetHandlerAddress} as minter on fee asset in ${txHash}`);
    return {
        feeAssetHandlerAddress,
        txHash
    };
};
|
|
1097
|
-
/**
 * Deploys the aztec L1 contracts; Rollup & (optionally) Decoder Helper.
 * @param rpcUrls - List of URLs of the ETH RPC to use for deployment.
 * @param account - Private Key or HD Account that will deploy the contracts.
 * @param chain - The chain instance to deploy to.
 * @param logger - A logger object.
 * @param args - Arguments for initialization of L1 contracts
 * @param txUtilsConfig - Tx utilities config (defaults to env-derived values).
 * @param createVerificationJson - When truthy, record constructor args/libraries for forge verify.
 * @returns A list of ETH addresses of the deployed contracts.
 */ export const deployL1Contracts = async (rpcUrls, account, chain, logger, args, txUtilsConfig = getL1TxUtilsConfigEnvVars(), createVerificationJson = false)=>{
    logger.info(`Deploying L1 contracts with config: ${jsonStringify(args)}`);
    validateConfig(args);
    // Funding initial validators requires minting, which an external token won't allow.
    if (args.initialValidators && args.initialValidators.length > 0 && args.existingTokenAddress) {
        throw new Error('Cannot deploy with both initialValidators and existingTokenAddress. ' + 'Initial validator funding requires minting tokens, which is not possible with an external token.');
    }
    const l1Client = createExtendedL1Client(rpcUrls, account, chain);
    // Deploy multicall3 if it does not exist in this network
    await deployMulticall3(l1Client, logger);
    // We are assuming that you are running this on a local anvil node which have 1s block times
    // To align better with actual deployment, we update the block interval to 12s
    const rpcCall = async (method, params)=>{
        logger.info(`Calling ${method} with params: ${JSON.stringify(params)}`);
        return await l1Client.transport.request({
            method,
            params
        });
    };
    if (isAnvilTestChain(chain.id)) {
        try {
            await rpcCall('anvil_setBlockTimestampInterval', [
                args.ethereumSlotDuration
            ]);
            logger.warn(`Set block interval to ${args.ethereumSlotDuration}`);
        } catch (e) {
            // Best-effort: a failure here only affects block timing on the anvil node.
            logger.error(`Error setting block interval: ${e}`);
        }
    }
    logger.verbose(`Deploying contracts from ${account.address.toString()}`);
    const dateProvider = new DateProvider();
    const deployer = new L1Deployer(l1Client, args.salt, dateProvider, args.acceleratedTestDeployments, logger, txUtilsConfig, !!createVerificationJson);
    // Shared (rollup-independent) contracts first, then the rollup itself.
    const { feeAssetAddress, feeAssetHandlerAddress, stakingAssetAddress, stakingAssetHandlerAddress, registryAddress, gseAddress, governanceAddress, rewardDistributorAddress, zkPassportVerifierAddress, coinIssuerAddress } = await deploySharedContracts(l1Client, deployer, args, logger);
    const { rollup, slashFactoryAddress } = await deployRollup(l1Client, deployer, args, {
        feeJuiceAddress: feeAssetAddress,
        registryAddress,
        gseAddress,
        rewardDistributorAddress,
        stakingAssetAddress,
        governanceAddress
    }, logger);
    logger.verbose('Waiting for rollup and slash factory to be deployed');
    await deployer.waitForDeployments();
    // Now that the rollup has been deployed and added to the registry, transfer ownership to governance
    const { dateGatedRelayerAddress } = await handoverToGovernance(l1Client, deployer, registryAddress, gseAddress, coinIssuerAddress, feeAssetAddress, governanceAddress, logger, args.acceleratedTestDeployments, !!args.existingTokenAddress);
    logger.info(`Handing over to governance complete`);
    logger.verbose(`All transactions for L1 deployment have been mined`);
    const l1Contracts = await RegistryContract.collectAddresses(l1Client, registryAddress, 'canonical');
    logger.info(`Aztec L1 contracts initialized`, l1Contracts);
    // Write verification data (constructor args + linked libraries) to file for later forge verify
    if (createVerificationJson) {
        await generateRollupVerificationRecords(rollup, deployer, args, l1Contracts, l1Client, logger);
        await writeVerificationJson(deployer, createVerificationJson, chain.id, '', logger);
    }
    if (isAnvilTestChain(chain.id)) {
        // @note We make a time jump PAST the very first slot to not have to deal with the edge case of the first slot.
        // The edge case being that the genesis block is already occupying slot 0, so we cannot have another block.
        try {
            // Need to get the time
            // NOTE(review): strict equality with the numbers 0/1 assumes getSlotNumber()
            // returns a number (not a bigint) — TODO confirm.
            const currentSlot = await rollup.getSlotNumber();
            if (currentSlot === 0) {
                const ts = Number(await rollup.getTimestampForSlot(SlotNumber(1)));
                await rpcCall('evm_setNextBlockTimestamp', [
                    ts
                ]);
                await rpcCall('hardhat_mine', [
                    1
                ]);
                // Re-read (intentionally shadows the outer binding) to verify the jump landed.
                const currentSlot = await rollup.getSlotNumber();
                if (currentSlot !== 1) {
                    throw new Error(`Error jumping time: current slot is ${currentSlot}`);
                }
                logger.info(`Jumped to slot 1`);
            }
        } catch (e) {
            throw new Error(`Error jumping time: ${e}`);
        }
    }
    return {
        rollupVersion: Number(await rollup.getVersion()),
        l1Client: l1Client,
        l1ContractAddresses: {
            ...l1Contracts,
            slashFactoryAddress,
            feeAssetHandlerAddress,
            stakingAssetHandlerAddress,
            zkPassportVerifierAddress,
            coinIssuerAddress,
            dateGatedRelayerAddress
        }
    };
};
|
|
1196
|
-
export class L1Deployer {
|
|
1197
|
-
client;
|
|
1198
|
-
acceleratedTestDeployments;
|
|
1199
|
-
logger;
|
|
1200
|
-
txUtilsConfig;
|
|
1201
|
-
createVerificationJson;
|
|
1202
|
-
salt;
|
|
1203
|
-
txHashes;
|
|
1204
|
-
l1TxUtils;
|
|
1205
|
-
verificationRecords;
|
|
1206
|
-
/**
     * @param client - Extended L1 wallet client used for all deployments and receipt waits.
     * @param maybeSalt - Optional numeric salt; when set, enables deterministic (CREATE2-style)
     *   deployments — it is padded to a 32-byte hex value.
     * @param dateProvider - Clock abstraction passed to the tx utilities.
     * @param acceleratedTestDeployments - When true, skips receipt waiting and maxes gas limits.
     * @param logger - Logger instance.
     * @param txUtilsConfig - Configuration forwarded to the L1 tx utilities.
     * @param createVerificationJson - When true, collect verification records on each deploy.
     */ constructor(client, maybeSalt, dateProvider = new DateProvider(), acceleratedTestDeployments = false, logger = createLogger('L1Deployer'), txUtilsConfig, createVerificationJson = false){
        this.client = client;
        this.acceleratedTestDeployments = acceleratedTestDeployments;
        this.logger = logger;
        this.txUtilsConfig = txUtilsConfig;
        this.createVerificationJson = createVerificationJson;
        // Hashes of every deployment tx sent so far; consumed by waitForDeployments().
        this.txHashes = [];
        // Per-contract verification records (name, address, constructor args, libraries).
        this.verificationRecords = [];
        // Normalize the salt to a 32-byte hex string, or leave undefined for non-deterministic deploys.
        this.salt = maybeSalt ? padHex(numberToHex(maybeSalt), {
            size: 32
        }) : undefined;
        this.l1TxUtils = createL1TxUtilsFromViemWallet(this.client, {
            logger: this.logger,
            dateProvider
        }, {
            ...this.txUtilsConfig,
            // In accelerated test mode, allow the debug max gas limit to skip estimation.
            debugMaxGasLimit: acceleratedTestDeployments
        });
    }
|
|
1225
|
-
/**
     * Deploys a contract artifact, tracking its tx hash and (optionally) a verification record.
     *
     * @param params - Artifact descriptor: name, contractAbi, contractBytecode, libraries.
     * @param args - Constructor arguments (defaults to []).
     * @param opts - Optional gasLimit / noSimulation overrides.
     * @returns The deployed address and whether the contract already existed (deterministic deploys).
     * @throws Error wrapping the formatted viem error when deployment fails.
     */ async deploy(params, args, opts = {}) {
        this.logger.debug(`Deploying ${params.name} contract`, {
            args
        });
        try {
            const { txHash, address, deployedLibraries, existed } = await deployL1Contract(this.client, params.contractAbi, params.contractBytecode, args ?? [], {
                salt: this.salt,
                libraries: params.libraries,
                logger: this.logger,
                l1TxUtils: this.l1TxUtils,
                acceleratedTestDeployments: this.acceleratedTestDeployments,
                gasLimit: opts.gasLimit,
                noSimulation: opts.noSimulation
            });
            // txHash is absent when the contract already existed at the deterministic address.
            if (txHash) {
                this.txHashes.push(txHash);
            }
            this.logger.debug(`Deployed ${params.name} at ${address}`, {
                args
            });
            if (this.createVerificationJson) {
                // Encode constructor args for verification
                let constructorArgsHex = '0x';
                try {
                    const abiItem = params.contractAbi.find((x)=>x && x.type === 'constructor');
                    const inputDefs = abiItem && Array.isArray(abiItem.inputs) ? abiItem.inputs : [];
                    constructorArgsHex = inputDefs.length > 0 ? encodeAbiParameters(inputDefs, args ?? []) : '0x';
                } catch {
                    // Best-effort: verification metadata must never fail the deployment itself.
                    constructorArgsHex = '0x';
                }
                this.verificationRecords.push({
                    name: params.name,
                    address: address.toString(),
                    constructorArgsHex,
                    libraries: deployedLibraries ?? []
                });
            }
            return {
                address,
                existed
            };
        } catch (error) {
            // Wrap with the artifact name and a formatted viem error as the cause.
            throw new Error(`Failed to deploy ${params.name}`, {
                cause: formatViemError(error)
            });
        }
    }
|
|
1272
|
-
async waitForDeployments() {
|
|
1273
|
-
if (this.acceleratedTestDeployments) {
|
|
1274
|
-
this.logger.info('Accelerated test deployments - skipping waiting for deployments');
|
|
1275
|
-
return;
|
|
1276
|
-
}
|
|
1277
|
-
if (this.txHashes.length === 0) {
|
|
1278
|
-
return;
|
|
1279
|
-
}
|
|
1280
|
-
this.logger.verbose(`Waiting for ${this.txHashes.length} transactions to be mined`, {
|
|
1281
|
-
txHashes: this.txHashes
|
|
1282
|
-
});
|
|
1283
|
-
const receipts = await Promise.all(this.txHashes.map((txHash)=>this.client.waitForTransactionReceipt({
|
|
1284
|
-
hash: txHash
|
|
1285
|
-
})));
|
|
1286
|
-
const failed = receipts.filter((r)=>r.status !== 'success');
|
|
1287
|
-
if (failed.length > 0) {
|
|
1288
|
-
throw new Error(`Some deployment txs have failed: ${failed.map((f)=>f.transactionHash).join(', ')}`);
|
|
1289
|
-
}
|
|
1290
|
-
this.logger.info('All transactions mined successfully', {
|
|
1291
|
-
txHashes: this.txHashes
|
|
1292
|
-
});
|
|
1293
|
-
}
|
|
1294
|
-
sendTransaction(tx, options) {
|
|
1295
|
-
return this.l1TxUtils.sendTransaction(tx, options).then(({ txHash, state })=>({
|
|
1296
|
-
txHash,
|
|
1297
|
-
gasLimit: state.gasLimit,
|
|
1298
|
-
gasPrice: state.gasPrice
|
|
1299
|
-
}));
|
|
1300
|
-
}
|
|
1301
|
-
}
|
|
1302
|
-
/**
 * Helper function to deploy ETH contracts, optionally via a universal CREATE2 deployer.
 * @param extendedClient - A viem client used to read code, wait for receipts, and (absent `opts.l1TxUtils`) build tx utils.
 * @param abi - The ETH contract's ABI (as abitype's Abi).
 * @param bytecode - The ETH contract's creation bytecode (hex), possibly containing unlinked library placeholders.
 * @param args - Constructor arguments for the contract.
 * @param opts - Options: `salt` for CREATE2 deployment (does not wait for the deployment tx to be mined if set, and does not send a tx if the contract already exists at the predicted address), `libraries` (link references + library code for linking), `logger`, `gasLimit`, `l1TxUtils`, `acceleratedTestDeployments`, `noSimulation`.
 * @returns The ETH address the contract was (or already is) deployed to, the deployment tx hash (if one was sent), any libraries deployed for linking, and whether the contract pre-existed.
 */ export async function deployL1Contract(extendedClient, abi, bytecode, args = [], opts = {}) {
    let txHash = undefined;
    let resultingAddress = undefined;
    const deployedLibraries = [];
    const { salt: saltFromOpts, libraries, logger, gasLimit, acceleratedTestDeployments, noSimulation } = opts;
    let { l1TxUtils } = opts;
    // Lazily build tx utils from env config when the caller did not provide them.
    if (!l1TxUtils) {
        const config = getL1TxUtilsConfigEnvVars();
        l1TxUtils = createL1TxUtilsFromViemWallet(extendedClient, {
            logger
        }, {
            ...config,
            debugMaxGasLimit: acceleratedTestDeployments
        });
    }
    if (libraries) {
        // Note that this does NOT work well for linked libraries having linked libraries.
        // Verify that all link references have corresponding code
        for(const linkRef in libraries.linkReferences){
            for(const contractName in libraries.linkReferences[linkRef]){
                if (!libraries.libraryCode[contractName]) {
                    throw new Error(`Missing library code for ${contractName}`);
                }
            }
        }
        // Maps the placeholder substring found in `bytecode` -> deployed library address.
        const replacements = {};
        const libraryTxs = [];
        for(const libraryName in libraries?.libraryCode){
            const lib = libraries.libraryCode[libraryName];
            // Recurse without `libraries` so library deployments themselves are not re-linked.
            const { libraries: _libraries, ...optsWithoutLibraries } = opts;
            const { address, txHash } = await deployL1Contract(extendedClient, lib.contractAbi, lib.contractBytecode, [], optsWithoutLibraries);
            // Log deployed library name and address for easier verification/triage
            logger?.verbose(`Linked library deployed`, {
                library: libraryName,
                address: address.toString(),
                txHash
            });
            if (txHash) {
                libraryTxs.push(txHash);
            }
            // Try to find the source file for this library from linkReferences
            let fileNameForLibrary = undefined;
            for(const fileName in libraries.linkReferences){
                if (libraries.linkReferences[fileName] && libraries.linkReferences[fileName][libraryName]) {
                    fileNameForLibrary = fileName;
                    break;
                }
            }
            if (fileNameForLibrary) {
                deployedLibraries.push({
                    file: fileNameForLibrary,
                    contract: libraryName,
                    address: address.toString()
                });
            }
            for(const linkRef in libraries.linkReferences){
                for(const contractName in libraries.linkReferences[linkRef]){
                    // If the library name matches the one we just deployed, we replace it.
                    if (contractName !== libraryName) {
                        continue;
                    }
                    // We read the first instance to figure out what we are to replace.
                    // Offsets are byte offsets into the binary; hex doubles them, +2 skips the '0x' prefix.
                    const start = 2 + 2 * libraries.linkReferences[linkRef][contractName][0].start;
                    const length = 2 * libraries.linkReferences[linkRef][contractName][0].length;
                    const toReplace = bytecode.slice(start, start + length);
                    replacements[toReplace] = address;
                }
            }
        }
        const escapeRegExp = (s)=>{
            return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // Escape special characters
        };
        // Splice each library address (sans '0x') over every occurrence of its placeholder.
        for(const toReplace in replacements){
            const replacement = replacements[toReplace].toString().slice(2);
            bytecode = bytecode.replace(new RegExp(escapeRegExp(toReplace), 'g'), replacement);
        }
        // Reth fails gas estimation if the deployed contract attempts to call a library that is not yet deployed,
        // so we wait for all library deployments to be mined before deploying the contract.
        // However, if we are in fast mode or using debugMaxGasLimit, we will skip simulation, so we can skip waiting.
        if (libraryTxs.length > 0 && !acceleratedTestDeployments) {
            logger?.verbose(`Awaiting for linked libraries to be deployed`);
            await Promise.all(libraryTxs.map((txHash)=>extendedClient.waitForTransactionReceipt({
                    hash: txHash
                })));
        } else {
            logger?.verbose(`Skipping waiting for linked libraries to be deployed ${acceleratedTestDeployments ? '(accelerated test deployments)' : ''}`);
        }
    }
    let existed = false;
    if (saltFromOpts) {
        // CREATE2 path: address is deterministic, so we can skip deployment if code is already there.
        logger?.info(`Deploying contract with salt ${saltFromOpts}`);
        const { address, paddedSalt: salt, calldata } = getExpectedAddress(abi, bytecode, args, saltFromOpts);
        resultingAddress = address;
        const existing = await extendedClient.getCode({
            address: resultingAddress
        });
        if (existing === undefined || existing === '0x') {
            if (!noSimulation) {
                try {
                    // Simulate through the universal deployer first; on failure, re-simulate as a
                    // plain deployment to surface a more useful revert reason.
                    await l1TxUtils.simulate({
                        to: DEPLOYER_ADDRESS,
                        data: concatHex([
                            salt,
                            calldata
                        ]),
                        gas: gasLimit
                    });
                } catch (err) {
                    logger?.error(`Failed to simulate deployment tx using universal deployer`, err);
                    await l1TxUtils.simulate({
                        to: null,
                        data: encodeDeployData({
                            abi,
                            bytecode,
                            args
                        }),
                        gas: gasLimit
                    });
                }
            }
            // Fire-and-record: CREATE2 deployments are not awaited here (see JSDoc).
            const res = await l1TxUtils.sendTransaction({
                to: DEPLOYER_ADDRESS,
                data: concatHex([
                    salt,
                    calldata
                ])
            }, {
                gasLimit
            });
            txHash = res.txHash;
            logger?.verbose(`Deployed contract with salt ${salt} to address ${resultingAddress} in tx ${txHash}.`);
        } else {
            logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${resultingAddress}`);
            existed = true;
        }
    } else {
        // Plain CREATE path: the address is only known from the mined receipt, so send and monitor.
        const deployData = encodeDeployData({
            abi,
            bytecode,
            args
        });
        const { receipt } = await l1TxUtils.sendAndMonitorTransaction({
            to: null,
            data: deployData
        }, {
            gasLimit
        });
        txHash = receipt.transactionHash;
        resultingAddress = receipt.contractAddress;
        if (!resultingAddress) {
            // Stringify with a bigint-safe replacer since receipts contain bigint fields.
            throw new Error(`No contract address found in receipt: ${JSON.stringify(receipt, (_, val)=>typeof val === 'bigint' ? String(val) : val)}`);
        }
    }
    return {
        address: EthAddress.fromString(resultingAddress),
        txHash,
        deployedLibraries,
        existed
    };
}
|
|
1471
|
-
/**
 * Predicts the CREATE2 address a contract will land at when deployed through
 * the universal deployer, and returns the padded salt and deployment calldata
 * used in that prediction.
 */
export function getExpectedAddress(abi, bytecode, args, salt) {
    // CREATE2 requires a full 32-byte salt.
    const paddedSalt = padHex(salt, {
        size: 32
    });
    // Deployment calldata = creation bytecode followed by ABI-encoded constructor args.
    const calldata = encodeDeployData({
        abi,
        bytecode,
        args
    });
    return {
        address: getContractAddress({
            from: DEPLOYER_ADDRESS,
            salt: paddedSalt,
            bytecode: calldata,
            opcode: 'CREATE2'
        }),
        paddedSalt,
        calldata
    };
}
|