olas-operate-middleware 0.1.0rc59__py3-none-any.whl → 0.13.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- olas_operate_middleware-0.13.2.dist-info/METADATA +75 -0
- olas_operate_middleware-0.13.2.dist-info/RECORD +101 -0
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info}/WHEEL +1 -1
- operate/__init__.py +17 -0
- operate/account/user.py +35 -9
- operate/bridge/bridge_manager.py +470 -0
- operate/bridge/providers/lifi_provider.py +377 -0
- operate/bridge/providers/native_bridge_provider.py +677 -0
- operate/bridge/providers/provider.py +469 -0
- operate/bridge/providers/relay_provider.py +457 -0
- operate/cli.py +1565 -417
- operate/constants.py +60 -12
- operate/data/README.md +19 -0
- operate/data/contracts/{service_staking_token → dual_staking_token}/__init__.py +2 -2
- operate/data/contracts/dual_staking_token/build/DualStakingToken.json +443 -0
- operate/data/contracts/dual_staking_token/contract.py +132 -0
- operate/data/contracts/dual_staking_token/contract.yaml +23 -0
- operate/{ledger/base.py → data/contracts/foreign_omnibridge/__init__.py} +2 -19
- operate/data/contracts/foreign_omnibridge/build/ForeignOmnibridge.json +1372 -0
- operate/data/contracts/foreign_omnibridge/contract.py +130 -0
- operate/data/contracts/foreign_omnibridge/contract.yaml +23 -0
- operate/{ledger/solana.py → data/contracts/home_omnibridge/__init__.py} +2 -20
- operate/data/contracts/home_omnibridge/build/HomeOmnibridge.json +1421 -0
- operate/data/contracts/home_omnibridge/contract.py +80 -0
- operate/data/contracts/home_omnibridge/contract.yaml +23 -0
- operate/data/contracts/l1_standard_bridge/__init__.py +20 -0
- operate/data/contracts/l1_standard_bridge/build/L1StandardBridge.json +831 -0
- operate/data/contracts/l1_standard_bridge/contract.py +158 -0
- operate/data/contracts/l1_standard_bridge/contract.yaml +23 -0
- operate/data/contracts/l2_standard_bridge/__init__.py +20 -0
- operate/data/contracts/l2_standard_bridge/build/L2StandardBridge.json +626 -0
- operate/data/contracts/l2_standard_bridge/contract.py +130 -0
- operate/data/contracts/l2_standard_bridge/contract.yaml +23 -0
- operate/data/contracts/mech_activity/__init__.py +20 -0
- operate/data/contracts/mech_activity/build/MechActivity.json +111 -0
- operate/data/contracts/mech_activity/contract.py +44 -0
- operate/data/contracts/mech_activity/contract.yaml +23 -0
- operate/data/contracts/optimism_mintable_erc20/__init__.py +20 -0
- operate/data/contracts/optimism_mintable_erc20/build/OptimismMintableERC20.json +491 -0
- operate/data/contracts/optimism_mintable_erc20/contract.py +45 -0
- operate/data/contracts/optimism_mintable_erc20/contract.yaml +23 -0
- operate/data/contracts/recovery_module/__init__.py +20 -0
- operate/data/contracts/recovery_module/build/RecoveryModule.json +811 -0
- operate/data/contracts/recovery_module/contract.py +61 -0
- operate/data/contracts/recovery_module/contract.yaml +23 -0
- operate/data/contracts/requester_activity_checker/__init__.py +20 -0
- operate/data/contracts/requester_activity_checker/build/RequesterActivityChecker.json +111 -0
- operate/data/contracts/requester_activity_checker/contract.py +33 -0
- operate/data/contracts/requester_activity_checker/contract.yaml +23 -0
- operate/data/contracts/staking_token/__init__.py +20 -0
- operate/data/contracts/staking_token/build/StakingToken.json +1336 -0
- operate/data/contracts/{service_staking_token → staking_token}/contract.py +27 -13
- operate/data/contracts/staking_token/contract.yaml +23 -0
- operate/data/contracts/uniswap_v2_erc20/contract.yaml +3 -1
- operate/data/contracts/uniswap_v2_erc20/tests/__init__.py +20 -0
- operate/data/contracts/uniswap_v2_erc20/tests/test_contract.py +363 -0
- operate/keys.py +118 -33
- operate/ledger/__init__.py +159 -56
- operate/ledger/profiles.py +321 -18
- operate/migration.py +555 -0
- operate/{http → operate_http}/__init__.py +3 -2
- operate/{http → operate_http}/exceptions.py +6 -4
- operate/operate_types.py +544 -0
- operate/pearl.py +13 -1
- operate/quickstart/analyse_logs.py +118 -0
- operate/quickstart/claim_staking_rewards.py +104 -0
- operate/quickstart/reset_configs.py +106 -0
- operate/quickstart/reset_password.py +70 -0
- operate/quickstart/reset_staking.py +145 -0
- operate/quickstart/run_service.py +726 -0
- operate/quickstart/stop_service.py +72 -0
- operate/quickstart/terminate_on_chain_service.py +83 -0
- operate/quickstart/utils.py +298 -0
- operate/resource.py +62 -3
- operate/services/agent_runner.py +202 -0
- operate/services/deployment_runner.py +868 -0
- operate/services/funding_manager.py +929 -0
- operate/services/health_checker.py +280 -0
- operate/services/manage.py +2356 -620
- operate/services/protocol.py +1246 -340
- operate/services/service.py +756 -391
- operate/services/utils/mech.py +103 -0
- operate/services/utils/tendermint.py +86 -12
- operate/settings.py +70 -0
- operate/utils/__init__.py +135 -0
- operate/utils/gnosis.py +407 -80
- operate/utils/single_instance.py +226 -0
- operate/utils/ssl.py +133 -0
- operate/wallet/master.py +708 -123
- operate/wallet/wallet_recovery_manager.py +507 -0
- olas_operate_middleware-0.1.0rc59.dist-info/METADATA +0 -304
- olas_operate_middleware-0.1.0rc59.dist-info/RECORD +0 -41
- operate/data/contracts/service_staking_token/build/ServiceStakingToken.json +0 -1273
- operate/data/contracts/service_staking_token/contract.yaml +0 -23
- operate/ledger/ethereum.py +0 -48
- operate/types.py +0 -260
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info}/entry_points.txt +0 -0
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info/licenses}/LICENSE +0 -0
operate/migration.py
ADDED
|
@@ -0,0 +1,555 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# ------------------------------------------------------------------------------
|
|
3
|
+
#
|
|
4
|
+
# Copyright 2025 Valory AG
|
|
5
|
+
#
|
|
6
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
7
|
+
# you may not use this file except in compliance with the License.
|
|
8
|
+
# You may obtain a copy of the License at
|
|
9
|
+
#
|
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
11
|
+
#
|
|
12
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
13
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
14
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
15
|
+
# See the License for the specific language governing permissions and
|
|
16
|
+
# limitations under the License.
|
|
17
|
+
#
|
|
18
|
+
# ------------------------------------------------------------------------------
|
|
19
|
+
|
|
20
|
+
"""Utilities for format migration"""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
import json
|
|
24
|
+
import logging
|
|
25
|
+
import shutil
|
|
26
|
+
import traceback
|
|
27
|
+
from pathlib import Path
|
|
28
|
+
from time import time
|
|
29
|
+
|
|
30
|
+
from aea_cli_ipfs.ipfs_utils import IPFSTool
|
|
31
|
+
from aea_ledger_ethereum import EthereumCrypto
|
|
32
|
+
from web3 import Web3
|
|
33
|
+
|
|
34
|
+
from operate.constants import USER_JSON, ZERO_ADDRESS
|
|
35
|
+
from operate.keys import KeysManager
|
|
36
|
+
from operate.operate_types import AgentRelease as AgentReleaseType
|
|
37
|
+
from operate.operate_types import AgentReleaseRepo, Chain, LedgerType
|
|
38
|
+
from operate.services.agent_runner import AgentRelease
|
|
39
|
+
from operate.services.manage import ServiceManager
|
|
40
|
+
from operate.services.service import (
|
|
41
|
+
NON_EXISTENT_MULTISIG,
|
|
42
|
+
SERVICE_CONFIG_PREFIX,
|
|
43
|
+
SERVICE_CONFIG_VERSION,
|
|
44
|
+
Service,
|
|
45
|
+
)
|
|
46
|
+
from operate.utils import create_backup, unrecoverable_delete
|
|
47
|
+
from operate.wallet.master import LEDGER_TYPE_TO_WALLET_CLASS, MasterWalletManager
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _computed_env_var(name: str, value: str = "") -> dict:
    """Build one env-var spec with ``provision_type`` "computed" and an empty description.

    :param name: human-readable variable name.
    :param value: default value (empty string unless the variable has a fixed default).
    :return: a fresh dict (a new instance per call, so entries are never shared).
    """
    return {
        "name": name,
        "description": "",
        "value": value,
        "provision_type": "computed",
    }


# Environment variables expected by the trader agent; used to backfill
# configs that predate the `env_variables` field. Every entry is
# "computed" (resolved by the middleware, not supplied by the user).
DEFAULT_TRADER_ENV_VARS = {
    "GNOSIS_LEDGER_RPC": _computed_env_var("Gnosis ledger RPC"),
    "STAKING_CONTRACT_ADDRESS": _computed_env_var("Staking contract address"),
    "MECH_MARKETPLACE_CONFIG": _computed_env_var("Mech marketplace configuration"),
    "MECH_ACTIVITY_CHECKER_CONTRACT": _computed_env_var("Mech activity checker contract"),
    "MECH_CONTRACT_ADDRESS": _computed_env_var("Mech contract address"),
    # 0.01 xDAI in wei, as a string.
    "MECH_REQUEST_PRICE": _computed_env_var("Mech request price", "10000000000000000"),
    "USE_MECH_MARKETPLACE": _computed_env_var("Use Mech marketplace", "False"),
    "REQUESTER_STAKING_INSTANCE_ADDRESS": _computed_env_var(
        "Requester staking instance address"
    ),
    "PRIORITY_MECH_ADDRESS": _computed_env_var("Priority Mech address"),
}
106
|
+
|
|
107
|
+
|
|
108
|
+
class MigrationManager:
    """Coordinates the one-off format migrations of the operate home directory."""

    def __init__(self, home: Path, logger: logging.Logger) -> None:
        """Store the home directory and the logger used for progress reporting.

        :param home: root directory holding the operate configuration files.
        :param logger: logger used to report migration progress.
        """
        super().__init__()
        self.logger = logger
        self._path = home
121
|
+
def log_directories(self, path: Path) -> None:
    """Emit a single info log listing every immediate subdirectory of ``path``.

    Plain files inside ``path`` are not listed.
    """
    listing = "\n".join(
        f" - {str(entry)}" for entry in path.iterdir() if entry.is_dir()
    )
    self.logger.info(f"Directories in {path}:\n{listing}")
+
|
|
127
|
+
def migrate_user_account(self) -> None:
    """Rename the legacy ``password_sha`` field of user.json to ``password_hash``.

    No-op when user.json is absent or already uses the new field name. A
    backup of the original file is created before rewriting.
    """
    user_json = self._path / USER_JSON
    if not user_json.exists():
        return

    data = json.loads(user_json.read_text(encoding="utf-8"))
    if "password_sha" not in data:
        return

    create_backup(user_json)
    # NOTE(review): only the password hash is carried over; any other keys
    # present in the legacy file are dropped — confirm this is intended.
    user_json.write_text(
        json.dumps({"password_hash": data["password_sha"]}, indent=4),
        encoding="utf-8",
    )
    self.logger.info("[MIGRATION MANAGER] Migrated user.json.")
+
|
|
147
|
+
def migrate_wallets(self, wallet_manager: "MasterWalletManager") -> None:
    """Run the per-ledger wallet config format migration for every existing wallet.

    :param wallet_manager: manager whose directory holds the wallet configs.
    """
    self.logger.info("Migrating wallet configs...")

    for ledger_type in LedgerType:
        if not wallet_manager.exists(ledger_type=ledger_type):
            continue

        wallet_class = LEDGER_TYPE_TO_WALLET_CLASS.get(ledger_type)
        if wallet_class is None:
            continue

        # Each wallet class knows how to upgrade its own on-disk format.
        if wallet_class.migrate_format(path=wallet_manager.path):
            self.logger.info(f"Wallet {wallet_class} has been migrated.")

    self.logger.info("Migrating wallet configs done.")
+
|
|
165
|
+
def _migrate_service(  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    self,
    path: Path,
) -> bool:
    """Migrate a single service config directory up to ``SERVICE_CONFIG_VERSION``.

    The migration is a strictly ordered chain of version steps (0 -> 2 -> 4
    -> 5 -> 6 -> 7 -> 8 -> 9); each ``if version < N`` block assumes the
    previous steps have already normalised the data.

    :param path: directory holding one service configuration — either a
        ``SERVICE_CONFIG_PREFIX``-named directory or a legacy IPFS-hash
        (``bafybei...``) named one.
    :return: True when the config was migrated, False when it was already
        current or ``path`` is not a service config directory.
    :raises RuntimeError: when the config version is newer than supported,
        or (for version < 9) the agent package is not in the supported set.
    """

    if not path.is_dir():
        self.logger.warning(f"Service config path {path} is not a directory.")
        return False

    if not path.name.startswith(SERVICE_CONFIG_PREFIX) and not path.name.startswith(
        "bafybei"
    ):
        self.logger.warning(
            f"Service config path {path} is not a valid service config."
        )
        return False

    # Legacy IPFS-hash-named configs get a timestamped on-disk backup first
    # (minus the bulky `deployment` subfolder).
    if path.name.startswith("bafybei"):
        backup_name = f"backup_{int(time())}_{path.name}"
        backup_path = path.parent / backup_name
        shutil.copytree(path, backup_path)
        deployment_path = backup_path / "deployment"
        if deployment_path.is_dir():
            shutil.rmtree(deployment_path)

    with open(
        path / Service._file,  # pylint: disable=protected-access
        "r",
        encoding="utf-8",
    ) as file:
        data = json.load(file)

    # Absence of a "version" field marks the original (version 0) format.
    version = data.get("version", 0)
    if version > SERVICE_CONFIG_VERSION:
        raise RuntimeError(
            f"Service configuration in {path} has version {version}, which means it was created with a newer version of olas-operate-middleware. Only configuration versions <= {SERVICE_CONFIG_VERSION} are supported by this version of olas-operate-middleware."
        )

    # Complete missing env vars for trader
    # (done even for up-to-date configs, and persisted immediately).
    if "trader" in data["name"].lower():
        data.setdefault("env_variables", {})

        for key, value in DEFAULT_TRADER_ENV_VARS.items():
            if key not in data["env_variables"]:
                data["env_variables"][key] = value

        with open(
            path / Service._file,  # pylint: disable=protected-access
            "w",
            encoding="utf-8",
        ) as file:
            json.dump(data, file, indent=2)

    if version == SERVICE_CONFIG_VERSION:
        return False

    self.logger.info(
        f"Migrating service config in {path} from version {version} to {SERVICE_CONFIG_VERSION}..."
    )

    # Migration steps for older versions
    # Version 0 -> 2: wrap the single flat chain config into the
    # multi-chain `chain_configs` layout keyed by chain id.
    if version == 0:
        new_data = {
            "version": 2,
            "hash": data.get("hash"),
            "keys": data.get("keys"),
            "home_chain_id": "100",  # This is the default value for version 2 - do not change, will be corrected below
            "chain_configs": {
                "100": {  # This is the default value for version 2 - do not change, will be corrected below
                    "ledger_config": {
                        "rpc": data.get("ledger_config", {}).get("rpc"),
                        "type": data.get("ledger_config", {}).get("type"),
                        "chain": data.get("ledger_config", {}).get("chain"),
                    },
                    "chain_data": {
                        "instances": data.get("chain_data", {}).get(
                            "instances", []
                        ),
                        "token": data.get("chain_data", {}).get("token"),
                        "multisig": data.get("chain_data", {}).get("multisig"),
                        "staked": data.get("chain_data", {}).get("staked", False),
                        "on_chain_state": data.get("chain_data", {}).get(
                            "on_chain_state", 3
                        ),
                        "user_params": {
                            "staking_program_id": "pearl_alpha",
                            "nft": data.get("chain_data", {})
                            .get("user_params", {})
                            .get("nft"),
                            "cost_of_bond": data.get("chain_data", {})
                            .get("user_params", {})
                            .get("cost_of_bond"),
                            "fund_requirements": data.get("chain_data", {})
                            .get("user_params", {})
                            .get("fund_requirements", {}),
                            "agent_id": data.get("chain_data", {})
                            .get("user_params", {})
                            .get("agent_id", "14"),
                        },
                    },
                }
            },
            "service_path": data.get("service_path", ""),
            "name": data.get("name", ""),
        }
        data = new_data

    # Version < 4: numeric ledger/chain enums become string names, chain
    # configs get re-keyed by chain name, and the config directory is
    # renamed to a fresh service_config_id.
    if version < 4:
        # Add missing fields introduced in later versions, if necessary.
        for _, chain_data in data.get("chain_configs", {}).items():
            service_name = data.get("name", "")
            agent_id = Service.determine_agent_id(service_name)
            chain_data.setdefault("chain_data", {}).setdefault("user_params", {})[
                "agent_id"
            ] = agent_id

        data["description"] = data.setdefault("description", data.get("name"))
        data["hash_history"] = data.setdefault(
            "hash_history", {int(time()): data["hash"]}
        )

        if "service_config_id" not in data:
            service_config_id = Service.get_new_service_config_id(path)
            new_path = path.parent / service_config_id
            data["service_config_id"] = service_config_id
            # From here on `path` points at the renamed directory.
            path = path.rename(new_path)

        # Old format stored the ledger as an index into this list.
        old_to_new_ledgers = ["ethereum", "solana"]
        for key_data in data["keys"]:
            key_data["ledger"] = old_to_new_ledgers[key_data["ledger"]]

        # Old format stored the chain as an index into this list.
        old_to_new_chains = [
            "ethereum",
            "goerli",
            "gnosis",
            "solana",
            "optimism",
            "base",
            "mode",
        ]
        new_chain_configs = {}
        for chain_id, chain_data in data["chain_configs"].items():
            chain_data["ledger_config"]["chain"] = old_to_new_chains[
                chain_data["ledger_config"]["chain"]
            ]
            del chain_data["ledger_config"]["type"]
            new_chain_configs[Chain.from_id(int(chain_id)).value] = chain_data  # type: ignore

        data["chain_configs"] = new_chain_configs
        data["home_chain"] = data.setdefault("home_chain", Chain.from_id(int(data.get("home_chain_id", "100"))).value)  # type: ignore
        del data["home_chain_id"]

        if "env_variables" not in data:
            if data["name"] == "valory/trader_pearl":
                data["env_variables"] = DEFAULT_TRADER_ENV_VARS
            else:
                data["env_variables"] = {}

    # Version < 5: fund requirements become keyed by token address; a flat
    # legacy mapping is nested under the native token (ZERO_ADDRESS).
    if version < 5:
        new_chain_configs = {}
        for chain, chain_data in data["chain_configs"].items():
            fund_requirements = chain_data["chain_data"]["user_params"][
                "fund_requirements"
            ]
            if ZERO_ADDRESS not in fund_requirements:
                chain_data["chain_data"]["user_params"]["fund_requirements"] = {
                    ZERO_ADDRESS: fund_requirements
                }

            new_chain_configs[chain] = chain_data  # type: ignore
        data["chain_configs"] = new_chain_configs

    # Version < 6: `service_path` is replaced by `package_path` (name only;
    # the package itself is re-downloaded below).
    if version < 6 and "service_path" in data:
        # Redownload service path
        package_absolute_path = path / Path(data["service_path"]).name
        data.pop("service_path")
        data["package_path"] = str(package_absolute_path.name)

    # Version < 7: placeholder multisig "0xm" is normalised and the `keys`
    # list collapses to plain `agent_addresses`.
    if version < 7:
        for _, chain_data in data.get("chain_configs", {}).items():
            if chain_data["chain_data"]["multisig"] == "0xm":
                chain_data["chain_data"]["multisig"] = NON_EXISTENT_MULTISIG

        data["agent_addresses"] = [key["address"] for key in data["keys"]]
        del data["keys"]

    # Version < 8: repeats the v7 fixes defensively (guarded this time —
    # NOTE(review): the "0xm"/keys handling duplicates the v7 step,
    # presumably to repair configs stamped v7 without those fixes applied)
    # and renames the "optimistic" chain to "optimism" throughout.
    if version < 8:
        for _, chain_data in data.get("chain_configs", {}).items():
            if chain_data["chain_data"]["multisig"] == "0xm":
                chain_data["chain_data"]["multisig"] = NON_EXISTENT_MULTISIG

        if "keys" in data:
            data["agent_addresses"] = [key["address"] for key in data["keys"]]
            del data["keys"]

        if data["home_chain"] == "optimistic":
            data["home_chain"] = Chain.OPTIMISM.value

        if "optimistic" in data["chain_configs"]:
            data["chain_configs"]["optimism"] = data["chain_configs"].pop(
                "optimistic"
            )

        for _, chain_config in data["chain_configs"].items():
            if chain_config["ledger_config"]["chain"] == "optimistic":
                chain_config["ledger_config"]["chain"] = Chain.OPTIMISM.value

    # Version < 9: attach a pinned `agent_release` for the known agents;
    # anything outside this set cannot be migrated automatically.
    if version < 9:
        agents_supported = {
            "trader_pearl": AgentRelease(
                is_aea=True, owner="valory-xyz", repo="trader", release="v0.0.101"
            ),
            "optimus": AgentRelease(
                is_aea=True, owner="valory-xyz", repo="optimus", release="v0.0.103"
            ),
            "memeooorr": AgentRelease(
                is_aea=True,
                owner="valory-xyz",
                repo="meme-ooorr",
                release="v0.0.101",
            ),
        }
        package_path = data["package_path"]
        try:
            release_data = agents_supported[package_path]
        except KeyError as e:
            raise RuntimeError(f"Found unsupported {package_path=}") from e

        data["agent_release"] = AgentReleaseType(
            is_aea=release_data.is_aea,
            repository=AgentReleaseRepo(
                owner=release_data.owner,
                name=release_data.repo,
                version=release_data.release,
            ),
        )

        if data["name"] is None:
            data["name"] = release_data.repo

    data["version"] = SERVICE_CONFIG_VERSION

    # Redownload service path
    if "service_path" in data:
        package_absolute_path = path / Path(data["service_path"]).name
        data.pop("service_path")
    else:
        package_absolute_path = path / data["package_path"]

    # Drop any stale local package and fetch a fresh copy from IPFS using
    # the config's pinned hash.
    if package_absolute_path.exists() and package_absolute_path.is_dir():
        shutil.rmtree(package_absolute_path)

    package_absolute_path = Path(
        IPFSTool().download(
            hash_id=data["hash"],
            target_dir=path,
        )
    )
    data["package_path"] = str(package_absolute_path.name)

    with open(
        path / Service._file,  # pylint: disable=protected-access
        "w",
        encoding="utf-8",
    ) as file:
        json.dump(data, file, indent=2)

    return True
434
|
+
|
|
435
|
+
def migrate_services(self, service_manager: "ServiceManager") -> None:
    """Migrate every service config under the service manager's directory.

    Refuses to proceed when more than one legacy IPFS-hash (``bafybei...``)
    folder is present; individual migration failures are logged and do not
    stop the remaining configs from being processed.

    :param service_manager: manager whose directory holds the service configs.
    """
    self.log_directories(service_manager.path)
    self.logger.info("Migrating service configs...")

    bafybei_count = sum(
        1
        for config_path in service_manager.path.iterdir()
        if config_path.name.startswith("bafybei")
    )
    if bafybei_count > 1:
        raise RuntimeError(
            f"Your services folder contains {bafybei_count} folders starting with 'bafybei'. This is an unintended situation. Please contact support."
        )

    for config_path in list(service_manager.path.iterdir()):
        try:
            if self._migrate_service(config_path):
                self.logger.info(f"Folder {str(config_path)} has been migrated.")
        except Exception as e:  # pylint: disable=broad-except
            self.logger.error(
                f"Failed to migrate service: {config_path.name}. Exception {e}: {traceback.format_exc()}"
            )

    self.logger.info("Migrating service configs done.")
    self.log_directories(service_manager.path)
+
|
|
464
|
+
def migrate_qs_configs(self) -> None:
    """Rename the legacy 'optimistic' chain identifier to 'optimism' in quickstart configs.

    Scans ``<home>/*-quickstart-config.json`` and rewrites a file in place
    only when its ``rpc`` mapping or ``principal_chain`` still uses the old
    name; untouched files are left byte-for-byte as they were.
    """

    for qs_config in self._path.glob("*-quickstart-config.json"):
        if not qs_config.exists():
            continue

        with open(qs_config, "r", encoding="utf-8") as f:
            data = json.load(f)

        changed = False
        if "optimistic" in data.get("rpc", {}):
            data["rpc"]["optimism"] = data["rpc"].pop("optimistic")
            changed = True

        if data.get("principal_chain", "") == "optimistic":
            data["principal_chain"] = "optimism"
            changed = True

        if not changed:
            continue

        with open(qs_config, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)

        self.logger.info(
            "[MIGRATION MANAGER] Migrated quickstart config: %s.", qs_config.name
        )
492
|
+
|
|
493
|
+
def migrate_keys(self, keys_manager: "KeysManager") -> None:
    """Migrate every key file in the keys directory to the current format.

    Two independent upgrades are applied per key file:
    1. numeric ledger codes (0/1) become string names ("ethereum"/"solana");
    2. plaintext ``0x...`` private keys are encrypted with the keys
       manager's password.

    After either upgrade the file is rewritten and a ``.bak`` copy of the
    (already migrated) file is kept alongside it.

    :param keys_manager: manager whose directory holds one JSON file per
        key, named by its address.
    :raises Exception: re-raises any failure to parse a key file.
    """
    self.logger.info("Migrating keys...")

    for key_file in keys_manager.path.iterdir():
        # Only plain files named by a valid address are key files; .bak
        # backups are skipped silently, everything else with a warning.
        if (
            not key_file.is_file()
            or key_file.suffix == ".bak"
            or not Web3.is_address(key_file.name)
        ):
            if not key_file.suffix == ".bak":
                self.logger.warning(f"Skipping non-key file: {key_file}")

            continue

        migrated = False
        backup_path = key_file.with_suffix(".bak")

        try:
            with open(key_file, "r", encoding="utf-8") as file:
                data = json.load(file)
        except Exception as e:  # pylint: disable=broad-except
            # Log the raw content for diagnosis, then re-raise: an
            # unparseable key file must abort the migration.
            self.logger.error(
                f"Failed to read key file: {key_file}\n"
                f"Key file content:\n{key_file.read_text(encoding='utf-8')}\n"
                f"Exception {e}: {traceback.format_exc()}"
            )
            raise e

        # Upgrade 1: legacy integer ledger code -> string name.
        # (Membership test is over the dict's keys, i.e. matches 0 or 1.)
        old_to_new_ledgers = {0: "ethereum", 1: "solana"}
        if data.get("ledger") in old_to_new_ledgers:
            data["ledger"] = old_to_new_ledgers.get(data["ledger"])
            with open(key_file, "w", encoding="utf-8") as file:
                json.dump(data, file, indent=2)

            migrated = True

        # Upgrade 2: encrypt a plaintext "0x..." private key. Skipped when
        # no password is available or the key is already encrypted (an
        # encrypted keystore JSON does not start with "0x").
        private_key = data.get("private_key")
        if (
            private_key
            and keys_manager.password is not None
            and private_key.startswith("0x")
        ):
            crypto: EthereumCrypto = keys_manager.private_key_to_crypto(
                private_key=private_key,
                password=None,
            )
            encrypted_private_key = crypto.encrypt(password=keys_manager.password)
            data["private_key"] = encrypted_private_key
            # Securely destroy any existing backup: it would still hold the
            # plaintext key. A new backup of the encrypted file is written
            # below instead — presumably intentional; confirm.
            if backup_path.exists():
                unrecoverable_delete(backup_path)

            migrated = True

        if migrated:
            with open(key_file, "w", encoding="utf-8") as file:
                json.dump(data, file, indent=2)

            # Backup is taken AFTER rewriting, so it contains the migrated
            # (ledger-renamed / encrypted) content, never the plaintext.
            if not backup_path.exists():
                shutil.copyfile(key_file, backup_path)

        if migrated:
            self.logger.info(f"Key {key_file.name} has been migrated.")
|
@@ -23,12 +23,13 @@ import json
|
|
|
23
23
|
import traceback
|
|
24
24
|
import typing as t
|
|
25
25
|
from abc import ABC
|
|
26
|
+
from http import HTTPStatus
|
|
26
27
|
|
|
27
28
|
from starlette.requests import Request
|
|
28
29
|
from starlette.responses import JSONResponse
|
|
29
30
|
from starlette.types import Receive, Scope, Send
|
|
30
31
|
|
|
31
|
-
from operate.
|
|
32
|
+
from operate.operate_http.exceptions import NotAllowed, ResourceException
|
|
32
33
|
|
|
33
34
|
|
|
34
35
|
# pylint: disable=no-self-use
|
|
@@ -142,7 +143,7 @@ class Resource(
|
|
|
142
143
|
tb = traceback.format_exc()
|
|
143
144
|
response = JSONResponse(
|
|
144
145
|
content={"error": str(e), "traceback": tb},
|
|
145
|
-
status_code=
|
|
146
|
+
status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
|
|
146
147
|
)
|
|
147
148
|
print(tb)
|
|
148
149
|
await response(scope=scope, receive=receive, send=send)
|
|
@@ -19,6 +19,8 @@
|
|
|
19
19
|
|
|
20
20
|
"""Exceptions."""
|
|
21
21
|
|
|
22
|
+
from http import HTTPStatus
|
|
23
|
+
|
|
22
24
|
|
|
23
25
|
class ResourceException(Exception):
|
|
24
26
|
"""Base resource exceptio."""
|
|
@@ -29,22 +31,22 @@ class ResourceException(Exception):
|
|
|
29
31
|
class BadRequest(ResourceException):
|
|
30
32
|
"""Bad request error."""
|
|
31
33
|
|
|
32
|
-
code =
|
|
34
|
+
code = HTTPStatus.BAD_REQUEST
|
|
33
35
|
|
|
34
36
|
|
|
35
37
|
class ResourceAlreadyExists(ResourceException):
|
|
36
38
|
"""Bad request error."""
|
|
37
39
|
|
|
38
|
-
code =
|
|
40
|
+
code = HTTPStatus.CONFLICT
|
|
39
41
|
|
|
40
42
|
|
|
41
43
|
class NotFound(ResourceException):
|
|
42
44
|
"""Not found error."""
|
|
43
45
|
|
|
44
|
-
code =
|
|
46
|
+
code = HTTPStatus.NOT_FOUND
|
|
45
47
|
|
|
46
48
|
|
|
47
49
|
class NotAllowed(ResourceException):
|
|
48
50
|
"""Not allowed error."""
|
|
49
51
|
|
|
50
|
-
code =
|
|
52
|
+
code = HTTPStatus.METHOD_NOT_ALLOWED
|