@rocketh/core 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1 -0
- package/dist/environment/index.d.ts +17 -0
- package/dist/environment/index.d.ts.map +1 -0
- package/dist/environment/index.js +630 -0
- package/dist/environment/index.js.map +1 -0
- package/dist/environment/providers/BaseProvider.d.ts +8 -0
- package/dist/environment/providers/BaseProvider.d.ts.map +1 -0
- package/dist/environment/providers/BaseProvider.js +10 -0
- package/dist/environment/providers/BaseProvider.js.map +1 -0
- package/dist/environment/providers/TransactionHashTracker.d.ts +11 -0
- package/dist/environment/providers/TransactionHashTracker.d.ts.map +1 -0
- package/dist/environment/providers/TransactionHashTracker.js +15 -0
- package/dist/environment/providers/TransactionHashTracker.js.map +1 -0
- package/dist/environment/utils/artifacts.d.ts +39 -0
- package/dist/environment/utils/artifacts.d.ts.map +1 -0
- package/dist/environment/utils/artifacts.js +158 -0
- package/dist/environment/utils/artifacts.js.map +1 -0
- package/dist/environment/utils/chains.d.ts +18 -0
- package/dist/environment/utils/chains.d.ts.map +1 -0
- package/dist/environment/utils/chains.js +152 -0
- package/dist/environment/utils/chains.js.map +1 -0
- package/dist/executor/index.d.ts +55 -0
- package/dist/executor/index.d.ts.map +1 -0
- package/dist/executor/index.js +366 -0
- package/dist/executor/index.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +9 -0
- package/dist/index.js.map +1 -0
- package/dist/internal/logging.d.ts +11 -0
- package/dist/internal/logging.d.ts.map +1 -0
- package/dist/internal/logging.js +71 -0
- package/dist/internal/logging.js.map +1 -0
- package/dist/internal/types.d.ts +5 -0
- package/dist/internal/types.d.ts.map +1 -0
- package/dist/internal/types.js +2 -0
- package/dist/internal/types.js.map +1 -0
- package/dist/types.d.ts +494 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/eth.d.ts +24 -0
- package/dist/utils/eth.d.ts.map +1 -0
- package/dist/utils/eth.js +62 -0
- package/dist/utils/eth.js.map +1 -0
- package/dist/utils/extensions.d.ts +32 -0
- package/dist/utils/extensions.d.ts.map +1 -0
- package/dist/utils/extensions.js +49 -0
- package/dist/utils/extensions.js.map +1 -0
- package/dist/utils/extensions.test.d.ts +18 -0
- package/dist/utils/extensions.test.d.ts.map +1 -0
- package/dist/utils/extensions.test.js +35 -0
- package/dist/utils/extensions.test.js.map +1 -0
- package/dist/utils/json.d.ts +6 -0
- package/dist/utils/json.d.ts.map +1 -0
- package/dist/utils/json.js +28 -0
- package/dist/utils/json.js.map +1 -0
- package/package.json +50 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2018-present Ronan Sandford
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# @rocketh/core
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
// Generated type declarations for dist/environment/index.js.
import { Environment, UnknownDeployments, UnresolvedUnknownNamedAccounts, UnresolvedNetworkSpecificData, ResolvedExecutionParams, ResolvedUserConfig, DeploymentStore } from '../types.js';
import { InternalEnvironment } from '../internal/types.js';
// Load previously saved deployments + migration records for `networkName`,
// optionally validating them against an expected chainId / genesisHash.
export declare function loadDeployments(deploymentStore: DeploymentStore, deploymentsPath: string, networkName: string, onlyABIAndAddress?: boolean, expectedChain?: {
    chainId: string;
    genesisHash?: `0x${string}`;
    deleteDeploymentsIfDifferentGenesisHash?: boolean;
}): Promise<{
    deployments: UnknownDeployments;
    migrations: Record<string, number>;
    chainId?: string;
    genesisHash?: `0x${string}`;
}>;
// Build the execution environment (external API for deploy scripts, internal
// API for the executor) from the resolved config, execution params and store.
export declare function createEnvironment<NamedAccounts extends UnresolvedUnknownNamedAccounts = UnresolvedUnknownNamedAccounts, Data extends UnresolvedNetworkSpecificData = UnresolvedNetworkSpecificData, Deployments extends UnknownDeployments = UnknownDeployments>(userConfig: ResolvedUserConfig<NamedAccounts, Data>, resolvedExecutionParams: ResolvedExecutionParams, deploymentStore: DeploymentStore): Promise<{
    internal: InternalEnvironment;
    external: Environment<NamedAccounts, Data, Deployments>;
}>;
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/environment/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAIN,WAAW,EAOX,kBAAkB,EAClB,8BAA8B,EAC9B,6BAA6B,EAG7B,uBAAuB,EACvB,kBAAkB,EAElB,eAAe,EACf,MAAM,aAAa,CAAC;AAErB,OAAO,EAAC,mBAAmB,EAAC,MAAM,sBAAsB,CAAC;AA8BzD,wBAAsB,eAAe,CACpC,eAAe,EAAE,eAAe,EAChC,eAAe,EAAE,MAAM,EACvB,WAAW,EAAE,MAAM,EACnB,iBAAiB,CAAC,EAAE,OAAO,EAC3B,aAAa,CAAC,EAAE;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,KAAK,MAAM,EAAE,CAAC;IAAC,uCAAuC,CAAC,EAAE,OAAO,CAAA;CAAC,GAC/G,OAAO,CAAC;IACV,WAAW,EAAE,kBAAkB,CAAC;IAChC,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,KAAK,MAAM,EAAE,CAAC;CAC5B,CAAC,CAgGD;AAED,wBAAsB,iBAAiB,CACtC,aAAa,SAAS,8BAA8B,GAAG,8BAA8B,EACrF,IAAI,SAAS,6BAA6B,GAAG,6BAA6B,EAC1E,WAAW,SAAS,kBAAkB,GAAG,kBAAkB,EAE3D,UAAU,EAAE,kBAAkB,CAAC,aAAa,EAAE,IAAI,CAAC,EACnD,uBAAuB,EAAE,uBAAuB,EAChD,eAAe,EAAE,eAAe,GAC9B,OAAO,CAAC;IAAC,QAAQ,EAAE,mBAAmB,CAAC;IAAC,QAAQ,EAAE,WAAW,CAAC,aAAa,EAAE,IAAI,EAAE,WAAW,CAAC,CAAA;CAAC,CAAC,CAgoBnG"}
|
|
@@ -0,0 +1,630 @@
|
|
|
1
|
+
import { JSONToString, stringToJSON } from '../utils/json.js';
|
|
2
|
+
import { log, spin } from '../internal/logging.js';
|
|
3
|
+
import { mergeArtifacts } from './utils/artifacts.js';
|
|
4
|
+
import { TransactionHashTrackerProvider } from './providers/TransactionHashTracker.js';
|
|
5
|
+
/** Resolve after the given number of seconds (Promise wrapper around setTimeout). */
function wait(numSeconds) {
    const milliseconds = numSeconds * 1000;
    return new Promise((done) => setTimeout(done, milliseconds));
}
|
|
10
|
+
/**
 * Render a one-line summary of a transaction's fee parameters.
 * EIP-1559 transactions (type '0x2') show maxFeePerGas / maxPriorityFeePerGas;
 * anything else shows gasPrice. Hex quantities are printed as decimal strings.
 */
function displayTransaction(transaction) {
    if (transaction.type !== '0x2') {
        return `(gasPrice: ${BigInt(transaction.gasPrice).toString()})`;
    }
    const maxFee = BigInt(transaction.maxFeePerGas).toString();
    const maxPriority = BigInt(transaction.maxPriorityFeePerGas).toString();
    return `(maxFeePerGas: ${maxFee}, maxPriorityFeePerGas: ${maxPriority})`;
}
|
|
18
|
+
/**
 * Load previously saved deployments for `networkName` from the deployment store.
 *
 * Reads the chain identity from '.chain' (JSON {chainId, genesisHash}) or the
 * legacy '.chainId' file, optionally validates it against `expectedChain`
 * (wiping all deployments on a genesis-hash mismatch when
 * deleteDeploymentsIfDifferentGenesisHash is set), then parses every
 * '<name>.json' file into the returned deployments map.
 *
 * @param deploymentStore  storage backend (listFiles/readFile/writeFile/...)
 * @param deploymentsPath  root folder holding per-network deployment folders
 * @param networkName      sub-folder to load
 * @param onlyABIAndAddress when true, strip each deployment down to
 *                          {address, abi, linkedData}
 * @param expectedChain    chain identity to validate against (optional)
 * @returns {deployments, migrations, chainId?, genesisHash?} — empty maps when
 *          the folder is missing, empty, or was wiped due to a genesis mismatch
 */
export async function loadDeployments(deploymentStore, deploymentsPath, networkName, onlyABIAndAddress, expectedChain) {
    const deploymentsFound = {};
    let fileNames;
    try {
        // keep normal files plus '.migrations.json'; skip other dot-files and 'solcInputs'
        fileNames = await deploymentStore.listFiles(deploymentsPath, networkName, (name) => !(name.startsWith('.') && name !== '.migrations.json') && name !== 'solcInputs');
    }
    catch (e) {
        // console.log('no folder at ' + deployPath);
        // folder does not exist: nothing deployed yet
        return { deployments: {}, migrations: {} };
    }
    let chainId;
    let genesisHash;
    if (fileNames.length > 0) {
        if (await deploymentStore.hasFile(deploymentsPath, networkName, '.chain')) {
            const chainSTR = await deploymentStore.readFile(deploymentsPath, networkName, '.chain');
            const chainData = JSON.parse(chainSTR);
            chainId = chainData.chainId;
            genesisHash = chainData.genesisHash;
        }
        else if (await deploymentStore.hasFile(deploymentsPath, networkName, '.chainId')) {
            // legacy format: '.chainId' holds the raw chain id string, no genesis hash
            chainId = await deploymentStore.readFile(deploymentsPath, networkName, '.chainId');
        }
        else {
            throw new Error(`A '.chain' or '.chainId' file is expected to be present in the deployment folder for network ${networkName}`);
        }
        if (expectedChain) {
            if (expectedChain.chainId !== chainId) {
                throw new Error(`Loading deployment from environment '${networkName}' (with chainId: ${chainId}) for a different chainId (${expectedChain.chainId})`);
            }
            if (genesisHash) {
                if (expectedChain.genesisHash && expectedChain.genesisHash !== genesisHash) {
                    if (expectedChain.deleteDeploymentsIfDifferentGenesisHash) {
                        // we delete the old folder
                        // chain was reset (different genesis): drop stale deployments
                        await deploymentStore.deleteAll(deploymentsPath, networkName);
                        return { deployments: {}, migrations: {} };
                    }
                    else {
                        throw new Error(`Loading deployment from environment '${networkName}' (with genesisHash: ${genesisHash}) for a different genesisHash (${expectedChain.genesisHash})`);
                    }
                }
            }
            else {
                // upgrade legacy folders: record the expected genesis hash and
                // remove the obsolete '.chainId' file (best effort)
                console.warn(`genesisHash not found in environment '${networkName}' (with chainId: ${chainId}), writing .chain with expected one...`);
                await deploymentStore.writeFile(deploymentsPath, networkName, '.chain', JSON.stringify({ chainId: expectedChain.chainId, genesisHash: expectedChain.genesisHash }));
                try {
                    await deploymentStore.deleteFile(deploymentsPath, networkName, '.chainId');
                }
                catch { }
            }
        }
    }
    else {
        return { deployments: {}, migrations: {} };
    }
    let migrations = {};
    const migrationsFileName = '.migrations.json';
    if (await deploymentStore.hasFile(deploymentsPath, networkName, migrationsFileName)) {
        try {
            migrations = JSON.parse(await deploymentStore.readFile(deploymentsPath, networkName, migrationsFileName));
        }
        catch (err) {
            // unparsable migrations file: proceed with an empty migrations map
            console.error(`failed to parse .migrations.json`);
        }
    }
    for (const fileName of fileNames) {
        // only '<name>.json' files are deployments ('.migrations.json' excluded)
        if (fileName.substring(fileName.length - 5) === '.json' && fileName !== '.migrations.json') {
            let deployment = JSON.parse(await deploymentStore.readFile(deploymentsPath, networkName, fileName));
            if (onlyABIAndAddress) {
                deployment = {
                    address: deployment.address,
                    abi: deployment.abi,
                    linkedData: deployment.linkedData,
                };
            }
            // strip the '.json' extension to get the deployment name
            const name = fileName.slice(0, fileName.length - 5);
            // console.log('fetching ' + deploymentFileName + ' for ' + name);
            deploymentsFound[name] = deployment;
        }
    }
    return { deployments: deploymentsFound, migrations, chainId, genesisHash };
}
|
|
99
|
+
/**
 * Build the rocketh execution environment for a resolved config + provider.
 * Detects the chain identity (chainId + genesis hash), resolves named
 * accounts and per-network data, loads prior deployments, and returns the
 * environment split into `external` (used by deploy scripts) and `internal`
 * (executor-only recovery / migration bookkeeping).
 */
export async function createEnvironment(userConfig, resolvedExecutionParams, deploymentStore) {
    // wrap the raw provider so transaction hashes it emits can be tracked
    const rawProvider = resolvedExecutionParams.provider;
    const provider = new TransactionHashTrackerProvider(rawProvider);
    // chainId is kept as a decimal string (eth_chainId returns a hex quantity)
    const chainIdHex = await provider.request({ method: 'eth_chainId' });
    const chainId = '' + Number(chainIdHex);
    let genesisHash;
    try {
        let genesisBlock;
        try {
            genesisBlock = await provider.request({ method: 'eth_getBlockByNumber', params: ['earliest', false] });
        }
        catch {
            // some nodes reject the 'earliest' tag; fall back to block 0x0
            genesisBlock = await provider.request({ method: 'eth_getBlockByNumber', params: ['0x0', false] });
        }
        if (!genesisBlock) {
            console.error(`failed to get genesis block, returned null`);
        }
        genesisHash = genesisBlock?.hash;
    }
    catch (err) {
        // genesisHash stays undefined; deployments are then matched by chainId only
        console.error(`failed to get genesis block`);
    }
    const deploymentsFolder = userConfig.deployments;
    const environmentName = resolvedExecutionParams.environment.name;
    const saveDeployments = resolvedExecutionParams.saveDeployments;
    // environment tags as a lookup object: {tagName: true}
    let networkTags = {};
    for (const networkTag of resolvedExecutionParams.environment.tags) {
        networkTags[networkTag] = true;
    }
    const resolvedAccounts = {};
    // accounts exposed by the provider — used for numeric account definitions
    // and for the unnamed-accounts list computed further down
    const allRemoteAccounts = await provider.request({ method: 'eth_accounts' });
    // memoizes getAccount resolutions, keyed by account name
    const accountCache = {};
|
|
131
|
+
/**
 * Resolve a named-account definition into a concrete account object
 * ({type, address, signer, ...}), memoizing results per name in accountCache.
 *
 * Supported definition forms:
 *  - number: index into the provider's eth_accounts list (remote signer)
 *  - '0x…' of length 66: raw private key, resolved via the 'privateKey'
 *    signer protocol declared in userConfig.signerProtocols
 *  - other '0x…' string: plain address, signed by the remote provider
 *  - 'protocol:extra': resolved via userConfig.signerProtocols[protocol]
 *  - other string: alias to another named account in `accounts`
 *  - object: per-network map keyed by environment name, chainId, or 'default'
 *
 * Returns undefined when the definition cannot be resolved.
 */
async function getAccount(name, accounts, accountDef) {
    if (accountCache[name]) {
        return accountCache[name];
    }
    let account;
    if (typeof accountDef === 'number') {
        const accountPerIndex = allRemoteAccounts[accountDef];
        if (accountPerIndex) {
            accountCache[name] = account = {
                type: 'remote',
                address: accountPerIndex,
                signer: provider,
            };
        }
    }
    else if (typeof accountDef === 'string') {
        if (accountDef.startsWith('0x')) {
            if (accountDef.length === 66) {
                // 32-byte hex string: treat as a raw private key
                const privateKeyProtocol = userConfig.signerProtocols?.['privateKey'];
                if (privateKeyProtocol) {
                    const namedSigner = await privateKeyProtocol(`privateKey:${accountDef}`);
                    const [address] = await namedSigner.signer.request({ method: 'eth_accounts' });
                    accountCache[name] = account = {
                        ...namedSigner,
                        address,
                    };
                }
            }
            else {
                // plain address: the remote provider is expected to sign for it
                accountCache[name] = account = {
                    type: 'remote',
                    address: accountDef,
                    signer: provider,
                };
            }
        }
        else {
            if (accountDef.indexOf(':') > 0) {
                // 'protocol:extra' — dispatch to the configured signer protocol
                const [protocolID, extra] = accountDef.split(':');
                const protocol = userConfig.signerProtocols?.[protocolID];
                if (!protocol) {
                    throw new Error(`protocol: ${protocolID} is not supported`);
                }
                const namedSigner = await protocol(accountDef);
                const [address] = await namedSigner.signer.request({ method: 'eth_accounts' });
                accountCache[name] = account = {
                    ...namedSigner,
                    address,
                };
            }
            else {
                // alias: resolve the referenced named account recursively
                const accountFetched = await getAccount(name, accounts, accounts[accountDef]);
                if (accountFetched) {
                    accountCache[name] = account = accountFetched;
                }
            }
        }
    }
    else {
        // TODO allow for canonical chain name ?
        // ?? (not ||) so a valid falsy definition — e.g. account index 0 —
        // configured for the environment or chainId is not skipped in favor
        // of 'default'
        const accountForNetwork = accountDef[environmentName] ?? accountDef[chainId] ?? accountDef['default'];
        // fixed: the previous check compared `typeof accountForNetwork` (always
        // a string) against the value `undefined`, which was always true
        if (accountForNetwork !== undefined) {
            const accountFetched = await getAccount(name, accounts, accountForNetwork);
            if (accountFetched) {
                accountCache[name] = account = accountFetched;
            }
        }
    }
    return account;
}
|
|
201
|
+
// resolve every configured named account up-front, failing fast when one
// cannot be resolved (e.g. the provider exposes no accounts)
if (userConfig.accounts) {
    const accountNames = Object.keys(userConfig.accounts);
    for (const accountName of accountNames) {
        const account = await getAccount(accountName, userConfig.accounts, userConfig.accounts[accountName]);
        if (!account) {
            throw new Error(`cannot get account for ${accountName} = ${JSON.stringify(userConfig.accounts[accountName], null, 2)}\nEnsure your provider (or hardhat) has some accounts set up for ${environmentName}\n`);
        }
        resolvedAccounts[accountName] = account;
    }
}
// per-network data values resolved from userConfig.data (filled below)
const resolvedData = {};
|
|
212
|
+
/**
 * Resolve a per-network data definition: pick the entry for the current
 * environment name, else the current chainId, else the 'default' entry.
 * Returns undefined when none of the three keys is present.
 */
async function getData(name, dataDef) {
    // ?? (not ||) so a valid falsy value (0, '', false) configured for the
    // environment or chainId is not discarded in favor of 'default'
    const dataForNetwork = dataDef[environmentName] ?? dataDef[chainId] ?? dataDef['default'];
    return dataForNetwork;
}
|
|
216
|
+
// resolve per-network data values declared in userConfig.data
if (userConfig.data) {
    const dataFields = Object.keys(userConfig.data);
    for (const dataField of dataFields) {
        let fieldData = await getData(dataField, userConfig.data[dataField]);
        resolvedData[dataField] = fieldData;
    }
}
// shared context consulted by the save/pending-transaction helpers below
const context = {
    accounts: resolvedAccounts,
    data: resolvedData,
    fork: resolvedExecutionParams.environment.fork,
    saveDeployments,
    tags: networkTags,
};
// load prior deployments; when forking, skip chain validation entirely,
// otherwise validate chainId/genesisHash (wiping deployments when the
// genesis hash differs, i.e. the dev chain was reset)
const { deployments, migrations } = await loadDeployments(deploymentStore, deploymentsFolder, environmentName, false, context.fork
    ? undefined
    : {
        chainId,
        genesisHash,
        deleteDeploymentsIfDifferentGenesisHash: true,
    });
// index the resolved accounts three ways:
//   name -> address, name -> signer, address -> signer
const namedAccounts = {};
const namedSigners = {};
const addressSigners = {};
for (const entry of Object.entries(resolvedAccounts)) {
    const name = entry[0];
    const { address, ...namedSigner } = entry[1];
    namedAccounts[name] = address;
    addressSigners[address] = namedSigner;
    namedSigners[name] = namedSigner;
}
// provider accounts not claimed by any named account
const unnamedAccounts = allRemoteAccounts.filter((v) => !addressSigners[v]);
for (const account of unnamedAccounts) {
    addressSigners[account] = {
        type: 'remote',
        signer: provider,
    };
}
// environment without its helper methods (they are attached further down)
const perliminaryEnvironment = {
    context: {
        saveDeployments: context.saveDeployments,
    },
    name: environmentName,
    tags: context.tags,
    deployments: deployments,
    namedAccounts: namedAccounts,
    data: resolvedData,
    namedSigners: namedSigners,
    unnamedAccounts,
    addressSigners: addressSigners,
    network: {
        chain: resolvedExecutionParams.chain,
        fork: context.fork,
        provider,
        deterministicDeployment: resolvedExecutionParams.environment.deterministicDeployment,
        // for backward compatibility
        tags: context.tags,
    },
    extra: resolvedExecutionParams.extra || {},
};
// const signer = {
// 	async sendTransaction(
// 		provider: EIP1193ProviderWithoutEvents,
// 		account: {
// 			addresss: EIP1193Account;
// 			config: unknown;
// 		},
// 		transaction: EIP1193TransactionEIP1193DATA
// 	): Promise<EIP1193DATA> {
// 		return '0x';
// 	},
// };
// async function sendTransaction(transaction: EIP1193TransactionEIP1193DATA): Promise<EIP1193DATA> {
// 	return '0x';
// }
|
|
291
|
+
/** Return the deployment recorded under `name`; throws when none exists. */
function get(name) {
    const found = deployments[name];
    if (found) {
        return found;
    }
    throw new Error(`no deployment named "${name}" found.`);
}
|
|
298
|
+
/** Return the deployment recorded under `name`, or null when absent. */
function getOrNull(name) {
    const found = deployments[name];
    return found ? found : null;
}
|
|
301
|
+
/** True when migration `id` has a recorded execution timestamp. */
function hasMigrationBeenDone(id) {
    return Boolean(migrations[id]);
}
|
|
304
|
+
// Mark migration `id` as executed (unix timestamp) and, when persistence is
// enabled, write the updated .migrations.json to the deployment store.
// NOTE(review): the writeFileWithChainInfo promise is not awaited
// (fire-and-forget) — confirm callers do not rely on the file being on disk
// when this returns.
function recordMigration(id) {
    migrations[id] = Math.floor(Date.now() / 1000);
    if (context.saveDeployments) {
        deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, '.migrations.json', JSON.stringify(migrations));
    }
}
|
|
310
|
+
/**
 * Find every deployment whose address matches `address` (case-insensitive)
 * and return their merged ABI together with the matching deployment names,
 * or null when no deployment uses that address.
 */
function fromAddressToNamedABIOrNull(address) {
    const lowercased = address.toLowerCase();
    const matches = [];
    for (const [name, deployment] of Object.entries(deployments)) {
        if (deployment.address.toLowerCase() == lowercased) {
            matches.push({ name, artifact: deployment });
        }
    }
    if (matches.length === 0) {
        return null;
    }
    const { mergedABI } = mergeArtifacts(matches);
    return {
        mergedABI,
        names: matches.map((m) => m.name),
    };
}
|
|
327
|
+
/** Like fromAddressToNamedABIOrNull, but throws when no deployment matches. */
function fromAddressToNamedABI(address) {
    const found = fromAddressToNamedABIOrNull(address);
    if (found) {
        return found;
    }
    throw new Error(`could not find artifact for address ${address}`);
}
|
|
334
|
+
/**
 * Record `deployment` in the in-memory deployments map under `name` and,
 * when persistence is enabled, write it to '<name>.json'.
 * Unless options.doNotCountAsNewDeployment is set, a numDeployments counter
 * is incremented from any previous deployment of the same name; otherwise it
 * is reset to 1.
 * NOTE(review): the persisted JSON is the raw `deployment` argument — it does
 * not include the numDeployments counter kept in memory; confirm intended.
 * NOTE(review): the write promise is not awaited (fire-and-forget).
 */
async function save(name, deployment, options) {
    if (!options?.doNotCountAsNewDeployment) {
        let numDeployments = 1;
        const oldDeployment = deployments[name];
        if (oldDeployment) {
            numDeployments = (oldDeployment.numDeployments || 1) + 1;
        }
        deployments[name] = { ...deployment, numDeployments };
    }
    else {
        deployments[name] = { ...deployment, numDeployments: 1 };
    }
    if (context.saveDeployments) {
        deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, `${name}.json`, JSONToString(deployment, 2));
    }
    return deployment;
}
|
|
351
|
+
/**
 * Replay any transactions left in '.pending_transactions.json' by a previous
 * interrupted run: wait for each to be mined (saving the resulting deployment
 * for 'deployment'-type entries), rewriting the pending file after each
 * success so a crash mid-recovery does not lose progress, then delete the
 * file once the queue is empty. No-op when deployments are not being saved.
 */
async function recoverTransactionsIfAny() {
    if (!context.saveDeployments) {
        return;
    }
    let existingPendingTansactions;
    try {
        existingPendingTansactions = stringToJSON(await deploymentStore.readFile(deploymentsFolder, environmentName, '.pending_transactions.json'));
    }
    catch {
        // no pending file (or unreadable): nothing to recover
        existingPendingTansactions = [];
    }
    if (existingPendingTansactions.length > 0) {
        while (existingPendingTansactions.length > 0) {
            const pendingTransaction = existingPendingTansactions.shift();
            if (pendingTransaction) {
                if (pendingTransaction.type === 'deployment') {
                    const spinner = spin(`recovering ${pendingTransaction.name} with transaction ${pendingTransaction.transaction.hash}`);
                    try {
                        await waitForDeploymentTransactionAndSave(pendingTransaction);
                        // persist the shrunken queue so progress survives a crash
                        await deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, '.pending_transactions.json', JSONToString(existingPendingTansactions, 2));
                        spinner.succeed();
                    }
                    catch (e) {
                        spinner.fail();
                        throw e;
                    }
                }
                else {
                    // non-deployment entry: just wait for the tx to be mined
                    const spinner = spin(`recovering execution's transaction ${pendingTransaction.transaction.hash}`);
                    try {
                        await waitForTransaction(pendingTransaction.transaction.hash);
                        await deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, '.pending_transactions.json', JSONToString(existingPendingTansactions, 2));
                        spinner.succeed();
                    }
                    catch (e) {
                        spinner.fail();
                        throw e;
                    }
                }
            }
        }
        // queue drained: remove the pending file entirely
        await deploymentStore.deleteFile(deploymentsFolder, environmentName, '.pending_transactions.json');
    }
}
|
|
395
|
+
/**
 * Append `pendingTransaction` to '.pending_transactions.json' (created when
 * absent) so the transaction can be recovered after a crash. No-op when
 * deployments are not being saved.
 * NOTE(review): returns the in-memory `deployments` map, which looks
 * unrelated to this function's purpose — confirm whether callers rely on it.
 */
async function savePendingTransaction(pendingTransaction) {
    if (context.saveDeployments) {
        let existingPendinTransactions;
        try {
            existingPendinTransactions = stringToJSON(await deploymentStore.readFile(deploymentsFolder, environmentName, '.pending_transactions.json'));
        }
        catch {
            // no pending file yet: start a fresh queue
            existingPendinTransactions = [];
        }
        existingPendinTransactions.push(pendingTransaction);
        await deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, '.pending_transactions.json', JSONToString(existingPendinTransactions, 2));
    }
    return deployments;
}
|
|
409
|
+
/**
 * Poll eth_getTransactionReceipt until a mined receipt (one with a blockHash)
 * is available, sleeping `pollingInterval` seconds between attempts.
 * `params` is {hash, pollingInterval?}; the interval defaults to
 * resolvedExecutionParams.pollingInterval. Request errors are swallowed and
 * retried; this never gives up on its own.
 */
async function waitForTransactionReceipt(params) {
    const { hash, pollingInterval } = { pollingInterval: resolvedExecutionParams.pollingInterval, ...params };
    while (true) {
        let receipt = null;
        try {
            receipt = await provider.request({
                method: 'eth_getTransactionReceipt',
                params: [hash],
            });
        }
        catch (err) { }
        if (receipt && receipt.blockHash) {
            return receipt;
        }
        await wait(pollingInterval);
    }
}
|
|
425
|
+
/**
 * Remove the transaction with `hash` from '.pending_transactions.json',
 * deleting the file entirely when the queue becomes empty. No-op when
 * deployments are not being saved.
 */
async function deleteTransaction(hash) {
    if (context.saveDeployments) {
        let existingPendinTransactions;
        try {
            existingPendinTransactions = stringToJSON(await deploymentStore.readFile(deploymentsFolder, environmentName, '.pending_transactions.json'));
        }
        catch {
            // no pending file: treat as an empty queue
            existingPendinTransactions = [];
        }
        existingPendinTransactions = existingPendinTransactions.filter((v) => v.transaction.hash !== hash);
        if (existingPendinTransactions.length === 0) {
            await deploymentStore.deleteFile(deploymentsFolder, environmentName, '.pending_transactions.json');
        }
        else {
            await deploymentStore.writeFileWithChainInfo({ chainId, genesisHash }, deploymentsFolder, environmentName, '.pending_transactions.json', JSONToString(existingPendinTransactions, 2));
        }
    }
}
|
|
443
|
+
/**
 * Wait (with a spinner) until transaction `hash` is mined and return its
 * receipt. `info.message` overrides the default "Broadcasting tx" message;
 * `info.transaction` adds fee details to it. The spinner fails and the error
 * propagates when polling throws.
 */
async function waitForTransaction(hash, info) {
    const spinner = spin(info?.message
        ? info.message
        : ` - Broadcasting tx:\n ${hash}${info?.transaction ? `\n ${displayTransaction(info?.transaction)}` : ''}`);
    let receipt;
    try {
        receipt = await waitForTransactionReceipt({
            hash,
        });
    }
    catch (e) {
        spinner.fail();
        throw e;
    }
    if (!receipt) {
        throw new Error(`receipt for ${hash} not found`);
    }
    else {
        spinner.succeed();
    }
    return receipt;
}
|
|
465
|
+
/**
 * Wait for a deployment transaction to be mined, determine the contract
 * address (an explicit pendingDeployment.expectedAddress wins over
 * receipt.contractAddress), strip internal artifact fields, then save the
 * resulting deployment under pendingDeployment.name (or just return it when
 * unnamed). The optional `transaction` argument is the full transaction
 * object, used only for fee display.
 */
async function waitForDeploymentTransactionAndSave(pendingDeployment, transaction) {
    const nameToDisplay = pendingDeployment.name || '<no name>';
    const message = ` - Deploying ${nameToDisplay} with tx:\n ${pendingDeployment.transaction.hash}${transaction ? `\n ${displayTransaction(transaction)}` : ''}`;
    const receipt = await waitForTransaction(pendingDeployment.transaction.hash, {
        message,
        transaction,
    });
    // TODO we could make pendingDeployment.expectedAddress a spec for fetching address from event too
    const contractAddress = pendingDeployment.expectedAddress || receipt.contractAddress;
    if (!contractAddress) {
        console.error(receipt);
        throw new Error(`no contract address found for ${nameToDisplay}`);
    }
    showMessage(` => ${contractAddress}`);
    const { abi, ...artifactObjectWithoutABI } = pendingDeployment.partialDeployment;
    if (!pendingDeployment.transaction.nonce) {
        // nonce missing from the pending record: fetch the full transaction so
        // nonce/origin can be stored in the deployment
        // const spinner = spin(`fetching nonce for ${pendingDeployment.transaction.hash}`);
        let transaction = null; // NOTE(review): shadows the `transaction` parameter above
        try {
            transaction = await provider.request({
                method: 'eth_getTransactionByHash',
                params: [pendingDeployment.transaction.hash],
            });
        }
        catch (e) {
            // spinner.fail(`failed to get transaction, even after receipt was found`);
            throw e;
        }
        if (!transaction) {
            // spinner.fail(`tx ${pendingDeployment.transaction.hash} not found, even after receipt was found`);
            // or : spinner.stop();
        }
        else {
            // spinner.stop();
        }
        if (transaction) {
            pendingDeployment.transaction = {
                nonce: transaction.nonce,
                hash: transaction.hash,
                origin: transaction.from,
            };
        }
    }
    // TODO options
    // prune private ('_'-prefixed) artifact fields and shrink 'evm' down to
    // its gasEstimates before persisting
    for (const key of Object.keys(artifactObjectWithoutABI)) {
        if (key.startsWith('_')) {
            delete artifactObjectWithoutABI[key];
        }
        if (key === 'evm') {
            if (artifactObjectWithoutABI.evm) {
                if ('gasEstimates' in artifactObjectWithoutABI['evm']) {
                    const { gasEstimates } = artifactObjectWithoutABI.evm;
                    artifactObjectWithoutABI.evm = {
                        gasEstimates,
                    };
                }
            }
        }
    }
    const deployment = {
        address: contractAddress,
        abi,
        ...artifactObjectWithoutABI,
        transaction: pendingDeployment.transaction,
        receipt: {
            blockHash: receipt.blockHash,
            blockNumber: receipt.blockNumber,
            transactionIndex: receipt.transactionIndex,
        },
    };
    if (pendingDeployment.name) {
        return save(pendingDeployment.name, deployment);
    }
    else {
        return deployment;
    }
}
|
|
542
|
+
/**
 * Persist an execution's pending transaction, enrich it with nonce/origin
 * fetched from the node (when the tx is already visible there), wait for it
 * to be mined, then remove it from the pending file.
 * @returns the transaction receipt
 */
async function savePendingExecution(pendingExecution) {
    await savePendingTransaction(pendingExecution);
    let transaction = null;
    const spinner = spin(); // TODO spin(`fetching tx from peers ${pendingDeployment.txHash}`);
    try {
        transaction = await provider.request({
            method: 'eth_getTransactionByHash',
            params: [pendingExecution.transaction.hash],
        });
    }
    catch (e) {
        spinner.fail();
        throw e;
    }
    if (!transaction) {
        // spinner.fail(`execution tx ${pendingExecution.transaction.hash} not found in the mempool yet`);
        spinner.stop();
    }
    else {
        spinner.stop();
    }
    if (transaction) {
        // record the nonce/origin reported by the node
        pendingExecution.transaction.nonce = transaction.nonce;
        pendingExecution.transaction.origin = transaction.from;
    }
    const receipt = await waitForTransaction(pendingExecution.transaction.hash, { transaction });
    await deleteTransaction(pendingExecution.transaction.hash);
    return receipt;
}
|
|
571
|
+
/**
 * Persist a deployment's pending transaction, refresh its tx data from the
 * node (hash/nonce/origin) when available, wait for it to be mined and save
 * the resulting deployment, then remove it from the pending file.
 * @returns the saved deployment
 */
async function savePendingDeployment(pendingDeployment) {
    await savePendingTransaction(pendingDeployment);
    let transaction = null;
    const spinner = spin(); // TODO spin(`fetching tx from peers ${pendingDeployment.txHash}`);
    try {
        transaction = await provider.request({
            method: 'eth_getTransactionByHash',
            params: [pendingDeployment.transaction.hash],
        });
    }
    catch (e) {
        spinner.fail(`failed to fetch tx ${pendingDeployment.transaction.hash}. Can't know its status`);
        throw e;
    }
    if (!transaction) {
        // spinner.fail(`deployment tx ${pendingDeployment.transaction.hash} not found in the mempool yet`);
        spinner.stop();
    }
    else {
        spinner.stop();
    }
    if (transaction) {
        // we update the tx data with the one we get from the network
        pendingDeployment = {
            ...pendingDeployment,
            transaction: { hash: transaction.hash, nonce: transaction.nonce, origin: transaction.from },
        };
    }
    const deployment = await waitForDeploymentTransactionAndSave(pendingDeployment, transaction);
    await deleteTransaction(pendingDeployment.transaction.hash);
    return deployment;
}
|
|
603
|
+
// Print an informational message through the shared logger.
function showMessage(message) {
    log(message);
}
|
|
606
|
+
// Start and return a spinner for a long-running step.
function showProgress(message) {
    return spin(message);
}
|
|
609
|
+
// public environment handed to deploy scripts: preliminary data plus the
// helper methods defined above
let env = {
    ...perliminaryEnvironment,
    save,
    savePendingDeployment,
    savePendingExecution,
    get,
    getOrNull,
    fromAddressToNamedABI,
    fromAddressToNamedABIOrNull,
    showMessage,
    showProgress,
    hasMigrationBeenDone,
};
return {
    external: env,
    // internal API used by the executor only (not exposed to deploy scripts)
    internal: {
        recoverTransactionsIfAny,
        recordMigration,
    },
};
}
|
|
630
|
+
//# sourceMappingURL=index.js.map
|