olas-operate-middleware 0.1.0rc59__py3-none-any.whl → 0.13.2__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- olas_operate_middleware-0.13.2.dist-info/METADATA +75 -0
- olas_operate_middleware-0.13.2.dist-info/RECORD +101 -0
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info}/WHEEL +1 -1
- operate/__init__.py +17 -0
- operate/account/user.py +35 -9
- operate/bridge/bridge_manager.py +470 -0
- operate/bridge/providers/lifi_provider.py +377 -0
- operate/bridge/providers/native_bridge_provider.py +677 -0
- operate/bridge/providers/provider.py +469 -0
- operate/bridge/providers/relay_provider.py +457 -0
- operate/cli.py +1565 -417
- operate/constants.py +60 -12
- operate/data/README.md +19 -0
- operate/data/contracts/{service_staking_token → dual_staking_token}/__init__.py +2 -2
- operate/data/contracts/dual_staking_token/build/DualStakingToken.json +443 -0
- operate/data/contracts/dual_staking_token/contract.py +132 -0
- operate/data/contracts/dual_staking_token/contract.yaml +23 -0
- operate/{ledger/base.py → data/contracts/foreign_omnibridge/__init__.py} +2 -19
- operate/data/contracts/foreign_omnibridge/build/ForeignOmnibridge.json +1372 -0
- operate/data/contracts/foreign_omnibridge/contract.py +130 -0
- operate/data/contracts/foreign_omnibridge/contract.yaml +23 -0
- operate/{ledger/solana.py → data/contracts/home_omnibridge/__init__.py} +2 -20
- operate/data/contracts/home_omnibridge/build/HomeOmnibridge.json +1421 -0
- operate/data/contracts/home_omnibridge/contract.py +80 -0
- operate/data/contracts/home_omnibridge/contract.yaml +23 -0
- operate/data/contracts/l1_standard_bridge/__init__.py +20 -0
- operate/data/contracts/l1_standard_bridge/build/L1StandardBridge.json +831 -0
- operate/data/contracts/l1_standard_bridge/contract.py +158 -0
- operate/data/contracts/l1_standard_bridge/contract.yaml +23 -0
- operate/data/contracts/l2_standard_bridge/__init__.py +20 -0
- operate/data/contracts/l2_standard_bridge/build/L2StandardBridge.json +626 -0
- operate/data/contracts/l2_standard_bridge/contract.py +130 -0
- operate/data/contracts/l2_standard_bridge/contract.yaml +23 -0
- operate/data/contracts/mech_activity/__init__.py +20 -0
- operate/data/contracts/mech_activity/build/MechActivity.json +111 -0
- operate/data/contracts/mech_activity/contract.py +44 -0
- operate/data/contracts/mech_activity/contract.yaml +23 -0
- operate/data/contracts/optimism_mintable_erc20/__init__.py +20 -0
- operate/data/contracts/optimism_mintable_erc20/build/OptimismMintableERC20.json +491 -0
- operate/data/contracts/optimism_mintable_erc20/contract.py +45 -0
- operate/data/contracts/optimism_mintable_erc20/contract.yaml +23 -0
- operate/data/contracts/recovery_module/__init__.py +20 -0
- operate/data/contracts/recovery_module/build/RecoveryModule.json +811 -0
- operate/data/contracts/recovery_module/contract.py +61 -0
- operate/data/contracts/recovery_module/contract.yaml +23 -0
- operate/data/contracts/requester_activity_checker/__init__.py +20 -0
- operate/data/contracts/requester_activity_checker/build/RequesterActivityChecker.json +111 -0
- operate/data/contracts/requester_activity_checker/contract.py +33 -0
- operate/data/contracts/requester_activity_checker/contract.yaml +23 -0
- operate/data/contracts/staking_token/__init__.py +20 -0
- operate/data/contracts/staking_token/build/StakingToken.json +1336 -0
- operate/data/contracts/{service_staking_token → staking_token}/contract.py +27 -13
- operate/data/contracts/staking_token/contract.yaml +23 -0
- operate/data/contracts/uniswap_v2_erc20/contract.yaml +3 -1
- operate/data/contracts/uniswap_v2_erc20/tests/__init__.py +20 -0
- operate/data/contracts/uniswap_v2_erc20/tests/test_contract.py +363 -0
- operate/keys.py +118 -33
- operate/ledger/__init__.py +159 -56
- operate/ledger/profiles.py +321 -18
- operate/migration.py +555 -0
- operate/{http → operate_http}/__init__.py +3 -2
- operate/{http → operate_http}/exceptions.py +6 -4
- operate/operate_types.py +544 -0
- operate/pearl.py +13 -1
- operate/quickstart/analyse_logs.py +118 -0
- operate/quickstart/claim_staking_rewards.py +104 -0
- operate/quickstart/reset_configs.py +106 -0
- operate/quickstart/reset_password.py +70 -0
- operate/quickstart/reset_staking.py +145 -0
- operate/quickstart/run_service.py +726 -0
- operate/quickstart/stop_service.py +72 -0
- operate/quickstart/terminate_on_chain_service.py +83 -0
- operate/quickstart/utils.py +298 -0
- operate/resource.py +62 -3
- operate/services/agent_runner.py +202 -0
- operate/services/deployment_runner.py +868 -0
- operate/services/funding_manager.py +929 -0
- operate/services/health_checker.py +280 -0
- operate/services/manage.py +2356 -620
- operate/services/protocol.py +1246 -340
- operate/services/service.py +756 -391
- operate/services/utils/mech.py +103 -0
- operate/services/utils/tendermint.py +86 -12
- operate/settings.py +70 -0
- operate/utils/__init__.py +135 -0
- operate/utils/gnosis.py +407 -80
- operate/utils/single_instance.py +226 -0
- operate/utils/ssl.py +133 -0
- operate/wallet/master.py +708 -123
- operate/wallet/wallet_recovery_manager.py +507 -0
- olas_operate_middleware-0.1.0rc59.dist-info/METADATA +0 -304
- olas_operate_middleware-0.1.0rc59.dist-info/RECORD +0 -41
- operate/data/contracts/service_staking_token/build/ServiceStakingToken.json +0 -1273
- operate/data/contracts/service_staking_token/contract.yaml +0 -23
- operate/ledger/ethereum.py +0 -48
- operate/types.py +0 -260
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info}/entry_points.txt +0 -0
- {olas_operate_middleware-0.1.0rc59.dist-info → olas_operate_middleware-0.13.2.dist-info/licenses}/LICENSE +0 -0
operate/services/service.py
CHANGED
@@ -23,16 +23,19 @@ import json
 import os
 import platform
 import shutil
-import signal
 import subprocess  # nosec
 import sys
+import tempfile
 import time
 import typing as t
-
+import uuid
+from copy import copy
 from dataclasses import dataclass
+from json import JSONDecodeError
 from pathlib import Path
+from traceback import print_exc

-import
+import requests
 from aea.configurations.constants import (
     DEFAULT_LEDGER,
     LEDGER,
@@ -40,11 +43,13 @@ from aea.configurations.constants import (
     PRIVATE_KEY_PATH_SCHEMA,
     SKILL,
 )
-from aea.
+from aea.helpers.logging import setup_logger
 from aea.helpers.yaml_utils import yaml_dump, yaml_load, yaml_load_all
 from aea_cli_ipfs.ipfs_utils import IPFSTool
 from autonomy.cli.helpers.deployment import run_deployment, stop_deployment
-from autonomy.configurations.
+from autonomy.configurations.constants import DEFAULT_SERVICE_CONFIG_FILE
+from autonomy.configurations.loader import apply_env_variables, load_service_config
+from autonomy.constants import DEFAULT_KEYS_FILE, DOCKER_COMPOSE_YAML
 from autonomy.deploy.base import BaseDeploymentGenerator
 from autonomy.deploy.base import ServiceBuilder as BaseServiceBuilder
 from autonomy.deploy.constants import (
@@ -57,39 +62,61 @@ from autonomy.deploy.constants import (
     VENVS_DIR,
 )
 from autonomy.deploy.generators.docker_compose.base import DockerComposeGenerator
+from autonomy.deploy.generators.kubernetes.base import KubernetesGenerator
 from docker import from_env

 from operate.constants import (
-
+    AGENT_FUNDS_STATUS_URL,
+    AGENT_PERSISTENT_STORAGE_ENV_VAR,
+    CONFIG_JSON,
+    DEPLOYMENT_DIR,
     DEPLOYMENT_JSON,
-
-
+    HEALTHCHECK_JSON,
+    SERVICE_SAFE_PLACEHOLDER,
+    ZERO_ADDRESS,
 )
-from operate.
-from operate.
-from operate.
-from operate.
-
-
+from operate.keys import KeysManager
+from operate.ledger import get_default_ledger_api, get_default_rpc
+from operate.operate_http.exceptions import NotAllowed
+from operate.operate_types import (
+    AgentRelease,
+    Chain,
+    ChainAmounts,
+    ChainConfig,
+    ChainConfigs,
     DeployedNodes,
     DeploymentConfig,
     DeploymentStatus,
+    EnvVariables,
     LedgerConfig,
-
+    LedgerConfigs,
     OnChainData,
-    OnChainState,
     OnChainUserParams,
+    ServiceEnvProvisionType,
+    ServiceTemplate,
 )
+from operate.resource import LocalResource
+from operate.services.deployment_runner import run_host_deployment, stop_host_deployment
+from operate.services.utils import tendermint
+from operate.utils import unrecoverable_delete
+from operate.utils.gnosis import get_asset_balance
+from operate.utils.ssl import create_ssl_certificate


+# pylint: disable=no-member,redefined-builtin,too-many-instance-attributes,too-many-locals
+
 SAFE_CONTRACT_ADDRESS = "safe_contract_address"
 ALL_PARTICIPANTS = "all_participants"
 CONSENSUS_THRESHOLD = "consensus_threshold"
+SERVICE_CONFIG_VERSION = 9
+SERVICE_CONFIG_PREFIX = "sc-"

+NON_EXISTENT_MULTISIG = None
+NON_EXISTENT_TOKEN = -1

-
+AGENT_TYPE_IDS = {"mech": 37, "optimus": 40, "modius": 40, "trader": 25}

-
+logger = setup_logger("operate.services.service")


 def mkdirs(build_dir: Path) -> None:
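The new module constants above drive behaviour that recurs throughout the rest of this diff: service config directories are named with the `sc-` prefix plus a UUID, and agent type IDs are looked up by substring of the service name. A minimal sketch of the config-id idea, mirroring the `get_new_service_config_id` helper added further down (the `storage` argument here is illustrative; the version in the diff checks `path.parent` rather than the path itself):

    import uuid
    from pathlib import Path

    SERVICE_CONFIG_PREFIX = "sc-"

    def new_service_config_id(storage: Path) -> str:
        # Retry until the generated directory name is unused under storage.
        while True:
            service_config_id = f"{SERVICE_CONFIG_PREFIX}{uuid.uuid4()}"
            if not (storage / service_config_id).exists():
                return service_config_id

    # e.g. new_service_config_id(Path(".")) -> "sc-2f9c2a64-...".
    # AGENT_TYPE_IDS makes "trader" (25) the fallback agent type in
    # determine_agent_id(), defined later in this file.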
@@ -135,42 +162,6 @@ def remove_service_network(service_name: str, force: bool = True) -> None:
 class ServiceBuilder(BaseServiceBuilder):
     """Service builder patch."""

-    def try_update_ledger_params(self, chain: str, address: str) -> None:
-        """Try to update the ledger params."""
-
-        for override in deepcopy(self.service.overrides):
-            (
-                override,
-                component_id,
-                _,
-            ) = self.service.process_metadata(
-                configuration=override,
-            )
-
-            if (
-                component_id.package_type == PackageType.CONNECTION
-                and component_id.name == "ledger"
-            ):
-                ledger_connection_overrides = deepcopy(override)
-                break
-        else:
-            return
-
-        # TODO: Support for multiple overrides
-        ledger_connection_overrides["config"]["ledger_apis"][chain]["address"] = address
-        service_overrides = deepcopy(self.service.overrides)
-        service_overrides = [
-            override
-            for override in service_overrides
-            if override["public_id"] != str(component_id.public_id)
-            or override["type"] != PackageType.CONNECTION.value
-        ]
-
-        ledger_connection_overrides["type"] = PackageType.CONNECTION.value
-        ledger_connection_overrides["public_id"] = str(component_id.public_id)
-        service_overrides.append(ledger_connection_overrides)
-        self.service.overrides = service_overrides
-
     def try_update_runtime_params(
         self,
         multisig_address: t.Optional[str] = None,
@@ -219,9 +210,12 @@ class ServiceBuilder(BaseServiceBuilder):
             has_multiple_overrides=has_multiple_overrides,
         )
         if service_id is not None:
-
-            "
-
+            if has_multiple_overrides:
+                os.environ["ON_CHAIN_SERVICE_ID"] = str(service_id)
+            else:
+                override["models"]["params"]["args"][
+                    "on_chain_service_id"
+                ] = service_id

         override["type"] = component_id.package_type.value
         override["public_id"] = str(component_id.public_id)
@@ -236,22 +230,29 @@ class ServiceHelper:
         """Initialize object."""
         self.path = path
         self.config = load_service_config(service_path=path)
+        self.config.overrides = apply_env_variables(
+            self.config.overrides, os.environ.copy()
+        )

-    def
-        """Get ledger
-
+    def ledger_configs(self) -> LedgerConfigs:
+        """Get ledger configs."""
+        ledger_configs = {}
         for override in self.config.overrides:
             if (
                 override["type"] == "connection"
                 and "valory/ledger" in override["public_id"]
             ):
-
-
-
-
-
-
-
+                if 0 in override:  # take the values from the first config
+                    override = override[0]
+
+                for _, config in override["config"]["ledger_apis"].items():
+                    # TODO chain name is inferred from the chain_id. The actual id provided on service.yaml is ignored.
+                    chain = Chain.from_id(chain_id=config["chain_id"])  # type: ignore
+                    ledger_configs[chain.value] = LedgerConfig(
+                        rpc=config["address"],
+                        chain=chain,
+                    )
+        return ledger_configs

     def deployment_config(self) -> DeploymentConfig:
         """Returns deployment config."""
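The rewritten `ledger_configs()` reads RPC endpoints out of the `valory/ledger` connection override rather than a single hard-coded chain. A rough sketch of the input shape it expects, with field names taken from the diff and the concrete values (including the `public_id` version) purely illustrative:

    # One entry of service.yaml overrides, as consumed by ledger_configs().
    override = {
        "type": "connection",
        "public_id": "valory/ledger:0.19.0",  # illustrative version
        "config": {
            "ledger_apis": {
                "gnosis": {  # key is ignored; the chain is inferred from chain_id
                    "address": "https://rpc.gnosischain.com",
                    "chain_id": 100,
                }
            }
        },
    }
    # Chain.from_id(chain_id=100) resolves the chain, so the resulting mapping is
    # {"gnosis": LedgerConfig(rpc="https://rpc.gnosischain.com", chain=<Chain gnosis>)}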
@@ -268,11 +269,30 @@ class HostDeploymentGenerator(BaseDeploymentGenerator):
     def generate_config_tendermint(self) -> "HostDeploymentGenerator":
         """Generate tendermint configuration."""
         tmhome = str(self.build_dir / "node")
+        tendermint_executable = str(
+            shutil.which("tendermint"),
+        )
+        env = {}
+        env["PATH"] = os.path.dirname(sys.executable) + ":" + os.environ["PATH"]
+        tendermint_executable = str(
+            Path(os.path.dirname(sys.executable)) / "tendermint"
+        )
+
+        if platform.system() == "Windows":
+            env["PATH"] = os.path.dirname(sys.executable) + ";" + os.environ["PATH"]
+            tendermint_executable = str(
+                Path(os.path.dirname(sys.executable)) / "tendermint.exe"
+            )
+
+        if not (getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS")):
+            # we dont run inside pyinstaller, mean DEV mode!
+            tendermint_executable = "tendermint"
+            if platform.system() == "Windows":
+                tendermint_executable = "tendermint.exe"
+
         subprocess.run(  # pylint: disable=subprocess-run-check # nosec
             args=[
-
-                shutil.which("tendermint"),
-                ),
+                tendermint_executable,
                 "--home",
                 tmhome,
                 "init",
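The executable resolution above hinges on the standard PyInstaller convention: a frozen binary sets `sys.frozen` and exposes its unpack directory as `sys._MEIPASS`. A condensed sketch of the same decision, behaviourally equivalent to the added block minus the `env["PATH"]` bookkeeping:

    import platform
    import sys
    from pathlib import Path

    def resolve_tendermint() -> str:
        """Pick the tendermint binary: next to the interpreter when frozen, from PATH in dev."""
        exe = "tendermint.exe" if platform.system() == "Windows" else "tendermint"
        frozen = getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS")
        if frozen:
            # PyInstaller bundle: the binary ships alongside the Python executable.
            return str(Path(sys.executable).parent / exe)
        return exe  # DEV mode: rely on PATH lookup.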
@@ -309,6 +329,8 @@ class HostDeploymentGenerator(BaseDeploymentGenerator):
         use_acn: bool = False,
     ) -> "HostDeploymentGenerator":
         """Generate agent and tendermint configurations"""
+        self.build_dir.mkdir(exist_ok=True, parents=True)
+        (self.build_dir / "agent").mkdir(exist_ok=True, parents=True)
         agent = self.service_builder.generate_agent(agent_n=0)
         agent = {key: f"{value}" for key, value in agent.items()}
         (self.build_dir / "agent.json").write_text(
@@ -338,201 +360,6 @@ class HostDeploymentGenerator(BaseDeploymentGenerator):
         return self


-def _run_cmd(args: t.List[str], cwd: t.Optional[Path] = None) -> None:
-    """Run command in a subprocess."""
-    print(f"Running: {' '.join(args)}")
-    # print working dir
-    print(f"Working dir: {os.getcwd()}")
-    result = subprocess.run(  # pylint: disable=subprocess-run-check # nosec
-        args=args,
-        cwd=cwd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
-    if result.returncode != 0:
-        raise RuntimeError(f"Error running: {args} @ {cwd}\n{result.stderr.decode()}")
-
-
-def _setup_agent(working_dir: Path) -> None:
-    """Setup agent."""
-    env = json.loads((working_dir / "agent.json").read_text(encoding="utf-8"))
-    # Patch for trader agent
-    if "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_STORE_PATH" in env:
-        data_dir = working_dir / "data"
-        data_dir.mkdir(exist_ok=True)
-        env["SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_STORE_PATH"] = str(data_dir)
-
-    # TODO: Dynamic port allocation, backport to service builder
-    env["CONNECTION_ABCI_CONFIG_HOST"] = "localhost"
-    env["CONNECTION_ABCI_CONFIG_PORT"] = "26658"
-
-    for var in env:
-        # Fix tendermint connection params
-        if var.endswith("MODELS_PARAMS_ARGS_TENDERMINT_COM_URL"):
-            env[var] = "http://localhost:8080"
-
-        if var.endswith("MODELS_PARAMS_ARGS_TENDERMINT_URL"):
-            env[var] = "http://localhost:26657"
-
-        if var.endswith("MODELS_PARAMS_ARGS_TENDERMINT_P2P_URL"):
-            env[var] = "localhost:26656"
-
-        if var.endswith("MODELS_BENCHMARK_TOOL_ARGS_LOG_DIR"):
-            benchmarks_dir = working_dir / "benchmarks"
-            benchmarks_dir.mkdir(exist_ok=True, parents=True)
-            env[var] = str(benchmarks_dir.resolve())
-
-    (working_dir / "agent.json").write_text(
-        json.dumps(env, indent=4),
-        encoding="utf-8",
-    )
-
-    abin = str(Path(sys._MEIPASS) / "aea_bin")  # type: ignore # pylint: disable=protected-access
-    # Fetch agent
-    _run_cmd(
-        args=[
-            abin,
-            "init",
-            "--reset",
-            "--author",
-            "valory",
-            "--remote",
-            "--ipfs",
-            "--ipfs-node",
-            "/dns/registry.autonolas.tech/tcp/443/https",
-        ],
-        cwd=working_dir,
-    )
-    _run_cmd(
-        args=[
-            abin,
-            "fetch",
-            env["AEA_AGENT"],
-            "--alias",
-            "agent",
-        ],
-        cwd=working_dir,
-    )
-
-    # Add keys
-    shutil.copy(
-        working_dir / "ethereum_private_key.txt",
-        working_dir / "agent" / "ethereum_private_key.txt",
-    )
-    _run_cmd(
-        args=[abin, "add-key", "ethereum"],
-        cwd=working_dir / "agent",
-    )
-    _run_cmd(
-        args=[abin, "issue-certificates"],
-        cwd=working_dir / "agent",
-    )
-
-
-def _start_agent(working_dir: Path) -> None:
-    """Start agent process."""
-    env = json.loads((working_dir / "agent.json").read_text(encoding="utf-8"))
-    aea_bin = str(Path(sys._MEIPASS) / "aea_bin")  # type: ignore # pylint: disable=protected-access
-    process = subprocess.Popen(  # pylint: disable=consider-using-with # nosec
-        args=[aea_bin, "run"],
-        cwd=working_dir / "agent",
-        stdout=subprocess.DEVNULL,
-        stderr=subprocess.DEVNULL,
-        env={**os.environ, **env},
-        creationflags=(
-            0x00000008 if platform.system() == "Windows" else 0
-        ),  # Detach process from the main process
-    )
-    (working_dir / "agent.pid").write_text(
-        data=str(process.pid),
-        encoding="utf-8",
-    )
-
-
-def _start_tendermint(working_dir: Path) -> None:
-    """Start tendermint process."""
-    env = json.loads((working_dir / "tendermint.json").read_text(encoding="utf-8"))
-    tendermint_com = str(Path(sys._MEIPASS) / "tendermint")  # type: ignore # pylint: disable=protected-access
-    process = subprocess.Popen(  # pylint: disable=consider-using-with # nosec
-        args=[tendermint_com],
-        cwd=working_dir,
-        stdout=subprocess.DEVNULL,
-        stderr=subprocess.DEVNULL,
-        env={**os.environ, **env},
-        creationflags=(
-            0x00000008 if platform.system() == "Windows" else 0
-        ),  # Detach process from the main process
-    )
-    (working_dir / "tendermint.pid").write_text(
-        data=str(process.pid),
-        encoding="utf-8",
-    )
-
-
-def _kill_process(pid: int) -> None:
-    """Kill process."""
-    print(f"Trying to kill process: {pid}")
-    while True:
-        if not psutil.pid_exists(pid=pid):
-            return
-        if psutil.Process(pid=pid).status() in (
-            psutil.STATUS_DEAD,
-            psutil.STATUS_ZOMBIE,
-        ):
-            return
-        try:
-            os.kill(
-                pid,
-                (
-                    signal.CTRL_C_EVENT  # type: ignore
-                    if platform.platform() == "Windows"
-                    else signal.SIGKILL
-                ),
-            )
-        except OSError:
-            return
-        time.sleep(1)
-
-
-def _stop_agent(working_dir: Path) -> None:
-    """Start process."""
-    pid = working_dir / "agent.pid"
-    if not pid.exists():
-        return
-    _kill_process(int(pid.read_text(encoding="utf-8")))
-
-
-def _stop_tendermint(working_dir: Path) -> None:
-    """Start tendermint process."""
-    pid = working_dir / "tendermint.pid"
-    if not pid.exists():
-        return
-    _kill_process(int(pid.read_text(encoding="utf-8")))
-
-
-def run_host_deployment(build_dir: Path) -> None:
-    """Run host deployment."""
-    _setup_agent(
-        working_dir=build_dir,
-    )
-    _start_tendermint(
-        working_dir=build_dir,
-    )
-    _start_agent(
-        working_dir=build_dir,
-    )
-
-
-def stop_host_deployment(build_dir: Path) -> None:
-    """Stop host deployment."""
-    _stop_agent(
-        working_dir=build_dir,
-    )
-    _stop_tendermint(
-        working_dir=build_dir,
-    )
-
-
 @dataclass
 class Deployment(LocalResource):
     """Deployment resource for a service."""
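Note: the host-deployment process management removed above is not dropped from the package. Per the file list at the top of this diff, it moves into the new `operate/services/deployment_runner.py` (+868 lines), and this module now imports `run_host_deployment` and `stop_host_deployment` from `operate.services.deployment_runner` (see the import hunk above).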
@@ -541,7 +368,7 @@ class Deployment(LocalResource):
     nodes: DeployedNodes
     path: Path

-    _file =
+    _file = DEPLOYMENT_JSON

     @staticmethod
     def new(path: Path) -> "Deployment":
@@ -564,9 +391,58 @@
         """Load a service"""
         return super().load(path)  # type: ignore

+    def copy_previous_agent_run_logs(self) -> None:
+        """Copy previous agent logs."""
+        source_path = self.path / DEPLOYMENT_DIR / "agent" / "log.txt"
+        destination_path = self.path / "prev_log.txt"
+        if source_path.exists():
+            shutil.copy(source_path, destination_path)
+
+    def _build_kubernetes(self, keys_manager: KeysManager, force: bool = True) -> None:
+        """Build kubernetes deployment."""
+        k8s_build = self.path / DEPLOYMENT_DIR / "abci_build_k8s"
+        if k8s_build.exists() and force:
+            shutil.rmtree(k8s_build)
+        mkdirs(build_dir=k8s_build)
+
+        service = Service.load(path=self.path)
+        keys_file = self.path / DEFAULT_KEYS_FILE
+        keys_file.write_text(
+            json.dumps(
+                [
+                    keys_manager.get_decrypted(address)
+                    for address in service.agent_addresses
+                ],
+                indent=4,
+            ),
+            encoding="utf-8",
+        )
+        builder = ServiceBuilder.from_dir(
+            path=service.package_absolute_path,
+            keys_file=keys_file,
+            number_of_agents=len(service.agent_addresses),
+        )
+        unrecoverable_delete(keys_file)
+        builder.deplopyment_type = KubernetesGenerator.deployment_type
+        (
+            KubernetesGenerator(
+                service_builder=builder,
+                build_dir=k8s_build.resolve(),
+                use_tm_testnet_setup=True,
+                image_author=builder.service.author,
+            )
+            .generate()
+            .generate_config_tendermint()
+            .write_config()
+            .populate_private_keys()
+        )
+        print(f"Kubernetes deployment built on {k8s_build.resolve()}\n")
+
     def _build_docker(
         self,
+        keys_manager: KeysManager,
         force: bool = True,
+        chain: t.Optional[str] = None,
     ) -> None:
         """Build docker deployment."""
         service = Service.load(path=self.path)
@@ -577,23 +453,20 @@
             force=force,
         )

-        build = self.path /
+        build = self.path / DEPLOYMENT_DIR
         if build.exists() and not force:
             return
         if build.exists() and force:
+            self.copy_previous_agent_run_logs()
             shutil.rmtree(build)
         mkdirs(build_dir=build)

-        keys_file = self.path /
+        keys_file = self.path / DEFAULT_KEYS_FILE
         keys_file.write_text(
             json.dumps(
                 [
-
-
-                        "private_key": key.private_key,
-                        "ledger": key.ledger.name.lower(),
-                    }
-                    for key in service.keys
+                    keys_manager.get_decrypted(address)
+                    for address in service.agent_addresses
                 ],
                 indent=4,
             ),
@@ -601,36 +474,42 @@
         )
         try:
             builder = ServiceBuilder.from_dir(
-                path=service.
+                path=service.package_absolute_path,
                 keys_file=keys_file,
-                number_of_agents=len(service.
+                number_of_agents=len(service.agent_addresses),
             )
+            unrecoverable_delete(keys_file)
             builder.deplopyment_type = DockerComposeGenerator.deployment_type
             builder.try_update_abci_connection_params()
+
+            if not chain:
+                chain = service.home_chain
+
+            chain_config = service.chain_configs[chain]
+            chain_data = chain_config.chain_data
+
             builder.try_update_runtime_params(
-                multisig_address=
-                agent_instances=
-                service_id=
+                multisig_address=chain_data.multisig,
+                agent_instances=chain_data.instances,
+                service_id=chain_data.token,
                 consensus_threshold=None,
             )
-            # TODO: Support for multiledger
-            builder.try_update_ledger_params(
-                chain=LedgerType(service.ledger_config.type).name.lower(),
-                address=service.ledger_config.rpc,
-            )

-            # build deployment
+            # build docker-compose deployment
            (
                 DockerComposeGenerator(
                     service_builder=builder,
                     build_dir=build.resolve(),
                     use_tm_testnet_setup=True,
+                    image_author=builder.service.author,
                 )
                 .generate()
                 .generate_config_tendermint()
                 .write_config()
                 .populate_private_keys()
             )
+            print(f"Docker Compose deployment built on {build.resolve()} \n")
+
         except Exception as e:
             shutil.rmtree(build)
             raise e
@@ -657,16 +536,17 @@
         for node in deployment["services"]:
             if "abci" in node:
                 deployment["services"][node]["volumes"].extend(_volumes)
-
-
-
-
-
-
-
-
-
-
+                new_mappings = []
+                for mapping in deployment["services"][node]["volumes"]:
+                    if mapping.startswith("./data"):
+                        (self.path / "persistent_data").mkdir(
+                            exist_ok=True, parents=True
+                        )
+                        mapping = mapping.replace("./data", "../persistent_data")
+
+                    new_mappings.append(mapping)
+
+                deployment["services"][node]["volumes"] = new_mappings

         with (build / DOCKER_COMPOSE_YAML).open("w", encoding="utf-8") as stream:
             yaml_dump(data=deployment, stream=stream)
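The loop above rewrites relative `./data` volume mounts so that agent data survives rebuilds of the throwaway deployment directory. A standalone sketch of the rewrite on a typical docker-compose volume list (the mount strings are illustrative):

    volumes = ["./data:/data", "./nodes:/tm_state"]
    new_mappings = []
    for mapping in volumes:
        if mapping.startswith("./data"):
            # Host side moves out of the build dir into ../persistent_data.
            mapping = mapping.replace("./data", "../persistent_data")
        new_mappings.append(mapping)
    assert new_mappings == ["../persistent_data:/data", "./nodes:/tm_state"]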
@@ -674,15 +554,30 @@
         self.status = DeploymentStatus.BUILT
         self.store()

-    def _build_host(
+    def _build_host(
+        self,
+        keys_manager: KeysManager,
+        force: bool = True,
+        chain: t.Optional[str] = None,
+        with_tm: bool = True,
+    ) -> None:
         """Build host depployment."""
-        build = self.path /
+        build = self.path / DEPLOYMENT_DIR
         if build.exists() and not force:
             return

         if build.exists() and force:
             stop_host_deployment(build_dir=build)
-
+            try:
+                # sleep needed to ensure all processes closed/killed otherwise it will block directory removal on windows
+                time.sleep(3)
+                self.copy_previous_agent_run_logs()
+                shutil.rmtree(build)
+            except:  # noqa # pylint: disable=bare-except
+                # sleep and try again. exception if fails
+                print_exc()
+                time.sleep(3)
+                shutil.rmtree(build)

         service = Service.load(path=self.path)
         if service.helper.config.number_of_agents > 1:
@@ -690,87 +585,129 @@
                 "Host deployment currently only supports single agent deployments"
             )

-
+        if not chain:
+            chain = service.home_chain
+
+        chain_config = service.chain_configs[chain]
+        chain_data = chain_config.chain_data
+
+        keys_file = self.path / DEFAULT_KEYS_FILE
         keys_file.write_text(
             json.dumps(
-                [
-                    {
-                        "address": key.address,
-                        "private_key": key.private_key,
-                        "ledger": key.ledger.name.lower(),
-                    }
-                    for key in service.keys
-                ],
+                [keys_manager.get(address).json for address in service.agent_addresses],
                 indent=4,
             ),
             encoding="utf-8",
         )
         try:
             builder = ServiceBuilder.from_dir(
-                path=service.
+                path=service.package_absolute_path,
                 keys_file=keys_file,
-                number_of_agents=len(service.
+                number_of_agents=len(service.agent_addresses),
             )
             builder.deplopyment_type = HostDeploymentGenerator.deployment_type
             builder.try_update_abci_connection_params()
             builder.try_update_runtime_params(
-                multisig_address=
-                agent_instances=
-                service_id=
+                multisig_address=chain_data.multisig,
+                agent_instances=chain_data.instances,
+                service_id=chain_data.token,
                 consensus_threshold=None,
             )
-
-
-
-
+
+            deployement_generator = HostDeploymentGenerator(
+                service_builder=builder,
+                build_dir=build.resolve(),
+                use_tm_testnet_setup=True,
             )
+            if with_tm:
+                deployement_generator.generate_config_tendermint()

-            (
-
-
-
-
-
-            .
-            .generate()
-            .populate_private_keys()
+            deployement_generator.generate()
+            deployement_generator.populate_private_keys()
+
+            # Add keys
+            shutil.copy(
+                build / "ethereum_private_key.txt",
+                build / "agent" / "ethereum_private_key.txt",
             )

         except Exception as e:
-
+            if build.exists():
+                shutil.rmtree(build)
             raise e

-        # Mech price patch.
-        agent_vars = json.loads(Path(build, "agent.json").read_text(encoding="utf-8"))
-        if "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE" in agent_vars:
-            agent_vars[
-                "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE"
-            ] = "10000000000000000"
-        Path(build, "agent.json").write_text(
-            json.dumps(agent_vars, indent=4),
-            encoding="utf-8",
-        )
-
         self.status = DeploymentStatus.BUILT
         self.store()

     def build(
         self,
+        keys_manager: KeysManager,
         use_docker: bool = False,
+        use_kubernetes: bool = False,
         force: bool = True,
+        chain: t.Optional[str] = None,
     ) -> None:
         """
         Build a deployment

-        :param use_docker: Use
+        :param use_docker: Use a Docker Compose deployment. If True, then no host deployment.
+        :param use_kubernetes: Build Kubernetes deployment. If True, then no host deployment.
         :param force: Remove existing deployment and build a new one
+        :param chain: Chain to set runtime parameters on the deployment (home_chain if not provided).
         :return: Deployment object
         """
-
-
-
+        # TODO: Maybe remove usage of chain and use home_chain always?
+        original_env = os.environ.copy()
+        service = Service.load(path=self.path)

-
+        if use_docker or use_kubernetes:
+            ssl_key_path, ssl_cert_path = create_ssl_certificate(
+                ssl_dir=service.path / PERSISTENT_DATA_DIR / "ssl"
+            )
+            service.update_env_variables_values(
+                {
+                    "STORE_PATH": "/data",
+                    "SSL_KEY_PATH": (
+                        Path("/data") / "ssl" / ssl_key_path.name
+                    ).as_posix(),
+                    "SSL_CERT_PATH": (
+                        Path("/data") / "ssl" / ssl_cert_path.name
+                    ).as_posix(),
+                }
+            )
+            service.consume_env_variables()
+            if use_docker:
+                self._build_docker(keys_manager=keys_manager, force=force, chain=chain)
+            if use_kubernetes:
+                self._build_kubernetes(keys_manager=keys_manager, force=force)
+        else:
+            ssl_key_path, ssl_cert_path = create_ssl_certificate(
+                ssl_dir=service.path / DEPLOYMENT_DIR / "ssl"
+            )
+            service.update_env_variables_values(
+                {
+                    "SSL_KEY_PATH": str(ssl_key_path),
+                    "SSL_CERT_PATH": str(ssl_cert_path),
+                }
+            )
+            service.consume_env_variables()
+            is_aea = service.agent_release["is_aea"]
+            self._build_host(
+                keys_manager=keys_manager,
+                force=force,
+                chain=chain,
+                with_tm=is_aea,
+            )
+
+        os.environ.clear()
+        os.environ.update(original_env)
+
+    def start(
+        self,
+        password: str,
+        use_docker: bool = False,
+        is_aea: bool = True,
+    ) -> None:
         """Start the service"""
         if self.status != DeploymentStatus.BUILT:
             raise NotAllowed(
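Taken together, the new signatures change the calling convention for deployments: callers now pass a `KeysManager` into `build()` and a password into `start()`. A hedged usage sketch (the surrounding bootstrap is hypothetical, not the middleware's documented entry point; `service` is a `Service` and `keys_manager` a `KeysManager`):

    deployment = service.deployment
    deployment.build(
        keys_manager=keys_manager,  # decrypts agent keys into a temporary keys file
        use_docker=False,           # host deployment (the default path)
        force=True,                 # rebuild, preserving prev_log.txt
    )
    deployment.start(
        password="...",                          # forwarded to run_host_deployment
        is_aea=service.agent_release["is_aea"],  # tendermint is skipped for non-AEA agents
    )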
@@ -782,9 +719,17 @@

         try:
             if use_docker:
-                run_deployment(
+                run_deployment(
+                    build_dir=self.path / DEPLOYMENT_DIR,
+                    detach=True,
+                    project_name=self.path.name,
+                )
             else:
-                run_host_deployment(
+                run_host_deployment(
+                    build_dir=self.path / DEPLOYMENT_DIR,
+                    password=password,
+                    is_aea=is_aea,
+                )
         except Exception:
             self.status = DeploymentStatus.BUILT
             self.store()
@@ -793,7 +738,12 @@
         self.status = DeploymentStatus.DEPLOYED
         self.store()

-    def stop(
+    def stop(
+        self,
+        use_docker: bool = False,
+        force: bool = False,
+        is_aea: bool = True,
+    ) -> None:
         """Stop the deployment."""
         if self.status != DeploymentStatus.DEPLOYED and not force:
             return
@@ -802,16 +752,20 @@
         self.store()

         if use_docker:
-            stop_deployment(
+            stop_deployment(
+                build_dir=self.path / DEPLOYMENT_DIR,
+                project_name=self.path.name,
+            )
         else:
-            stop_host_deployment(build_dir=self.path /
+            stop_host_deployment(build_dir=self.path / DEPLOYMENT_DIR, is_aea=is_aea)

         self.status = DeploymentStatus.BUILT
         self.store()

     def delete(self) -> None:
         """Delete the deployment."""
-
+        build = self.path / DEPLOYMENT_DIR
+        shutil.rmtree(build)
         self.status = DeploymentStatus.DELETED
         self.store()

@@ -820,20 +774,43 @@
 class Service(LocalResource):
     """Service class."""

-
-
-
-    chain_data: OnChainData
-
+    name: str
+    version: int
+    service_config_id: str
     path: Path
-
-
-
+    package_path: Path
+    hash: str
+    hash_history: t.Dict[int, str]
+    agent_release: AgentRelease
+    agent_addresses: t.List[str]
+    home_chain: str
+    chain_configs: ChainConfigs
+    description: str
+    env_variables: EnvVariables

     _helper: t.Optional[ServiceHelper] = None
     _deployment: t.Optional[Deployment] = None

-    _file =
+    _file = CONFIG_JSON
+
+    @property
+    def json(self) -> t.Dict:
+        """To dictionary object."""
+        obj = super().json
+        obj["service_public_id"] = self.service_public_id()
+        return obj
+
+    @staticmethod
+    def determine_agent_id(service_name: str) -> int:
+        """Determine the appropriate agent ID based on service name."""
+        service_name_lower = service_name.lower()
+        if "mech" in service_name_lower:
+            return AGENT_TYPE_IDS["mech"]
+        if "optimus" in service_name_lower:
+            return AGENT_TYPE_IDS["optimus"]
+        if "modius" in service_name_lower:
+            return AGENT_TYPE_IDS["modius"]
+        return AGENT_TYPE_IDS["trader"]

     @classmethod
     def load(cls, path: Path) -> "Service":
@@ -844,7 +821,7 @@
     def helper(self) -> ServiceHelper:
         """Get service helper."""
         if self._helper is None:
-            self._helper = ServiceHelper(path=self.
+            self._helper = ServiceHelper(path=self.package_absolute_path)
         return t.cast(ServiceHelper, self._helper)

     @property
@@ -852,54 +829,442 @@
         """Load deployment object for the service."""
         if not (self.path / DEPLOYMENT_JSON).exists():
             self._deployment = Deployment.new(path=self.path)
-
+        try:
            self._deployment = Deployment.load(path=self.path)
+        except JSONDecodeError:
+            self._deployment = Deployment.new(path=self.path)
         return t.cast(Deployment, self._deployment)

+    @property
+    def package_absolute_path(self) -> Path:
+        """Get the package_absolute_path."""
+        self._ensure_package_exists()
+        package_absolute_path = self.path / self.package_path
+        return package_absolute_path
+
+    def _ensure_package_exists(self) -> None:
+        package_absolute_path = self.path / self.package_path
+        if (
+            not package_absolute_path.exists()
+            or not (package_absolute_path / DEFAULT_SERVICE_CONFIG_FILE).exists()
+        ):
+            with tempfile.TemporaryDirectory(dir=self.path) as temp_dir:
+                package_temp_path = Path(
+                    IPFSTool().download(
+                        hash_id=self.hash,
+                        target_dir=temp_dir,
+                    )
+                )
+                target_path = self.path / package_temp_path.name
+
+                if target_path.exists():
+                    shutil.rmtree(target_path)
+
+                shutil.move(package_temp_path, target_path)
+                self.package_path = Path(target_path.name)
+                self.store()
+
     @staticmethod
-    def new(
-
-
-        rpc: str,
-        on_chain_user_params: OnChainUserParams,
+    def new(  # pylint: disable=too-many-locals
+        agent_addresses: t.List[str],
+        service_template: ServiceTemplate,
         storage: Path,
     ) -> "Service":
         """Create a new service."""
-
+
+        service_config_id = Service.get_new_service_config_id(storage)
+        path = storage / service_config_id
         path.mkdir()
-
+        package_absolute_path = Path(
             IPFSTool().download(
-                hash_id=hash,
+                hash_id=service_template["hash"],
                 target_dir=path,
             )
         )
-        with (service_path / "service.yaml").open("r", encoding="utf-8") as fp:
-            config, *_ = yaml_load_all(fp)

-
-
-
-
-
-
-
-
-
-                ),
-            chain_data=OnChainData(
+        chain_configs = {}
+        for chain_str, config in service_template["configurations"].items():
+            chain = Chain(chain_str)
+            ledger_config = LedgerConfig(
+                rpc=get_default_rpc(Chain(chain_str)), chain=chain
+            )
+            ledger_config.rpc = config["rpc"]
+
+            chain_data = OnChainData(
                 instances=[],
-                token
-                multisig=
-
-
-
-
-
-
+                token=NON_EXISTENT_TOKEN,
+                multisig=NON_EXISTENT_MULTISIG,
+                user_params=OnChainUserParams.from_json(config),  # type: ignore
+            )
+
+            chain_configs[chain_str] = ChainConfig(
+                ledger_config=ledger_config,
+                chain_data=chain_data,
+            )
+
+        current_timestamp = int(time.time())
+        service = Service(
+            version=SERVICE_CONFIG_VERSION,
+            service_config_id=service_config_id,
+            name=service_template["name"],
+            description=service_template["description"],
+            hash=service_template["hash"],
+            agent_addresses=agent_addresses,
+            home_chain=service_template["home_chain"],
+            hash_history={current_timestamp: service_template["hash"]},
+            chain_configs=chain_configs,
+            path=package_absolute_path.parent,
+            package_path=Path(package_absolute_path.name),
+            env_variables=service_template["env_variables"],
+            agent_release=service_template["agent_release"],
         )
         service.store()
         return service

-    def
-        """
-
+    def service_public_id(self, include_version: bool = True) -> str:
+        """Get the public id (based on the service hash)."""
+        with (self.package_absolute_path / DEFAULT_SERVICE_CONFIG_FILE).open(
+            "r", encoding="utf-8"
+        ) as fp:
+            service_yaml, *_ = yaml_load_all(fp)
+
+        public_id = f"{service_yaml['author']}/{service_yaml['name']}"
+
+        if include_version:
+            public_id += f":{service_yaml['version']}"
+
+        return public_id
+
+    @staticmethod
+    def get_service_public_id(
+        hash: str, temp_dir: t.Optional[Path] = None, include_version: bool = True
+    ) -> str:
+        """
+        Get the service public ID from IPFS based on the hash.
+
+        :param hash: The IPFS hash of the service.
+        :param dir: Optional directory path where the temporary download folder will be created.
+            If None, a system-default temporary directory will be used.
+        :return: The public ID of the service in the format "author/name:version".
+        """
+        with tempfile.TemporaryDirectory(dir=temp_dir) as path:
+            package_path = Path(
+                IPFSTool().download(
+                    hash_id=hash,
+                    target_dir=path,
+                )
+            )
+
+            with (package_path / DEFAULT_SERVICE_CONFIG_FILE).open(
+                "r", encoding="utf-8"
+            ) as fp:
+                service_yaml, *_ = yaml_load_all(fp)
+
+            public_id = f"{service_yaml['author']}/{service_yaml['name']}"
+
+            if include_version:
+                public_id += f":{service_yaml['version']}"
+
+            return public_id
+
+    @staticmethod
+    def get_new_service_config_id(path: Path) -> str:
+        """Get a new service config id that does not clash with any directory in path."""
+        while True:
+            service_config_id = f"{SERVICE_CONFIG_PREFIX}{uuid.uuid4()}"
+            new_path = path.parent / service_config_id
+            if not new_path.exists():
+                return service_config_id
+
+    def get_latest_healthcheck(self) -> t.Dict:
+        """Return the latest stored healthcheck.json"""
+        healthcheck_json_path = self.path / HEALTHCHECK_JSON
+
+        if not healthcheck_json_path.exists():
+            return {}
+
+        try:
+            with open(healthcheck_json_path, "r", encoding="utf-8") as file:
+                return json.load(file)
+        except (IOError, json.JSONDecodeError) as e:
+            return {"error": f"Error reading healthcheck.json: {e}"}
+
+    def remove_latest_healthcheck(self) -> None:
+        """Remove the latest healthcheck.json, if it exists"""
+        healthcheck_json_path = self.path / HEALTHCHECK_JSON
+
+        if healthcheck_json_path.exists():
+            try:
+                healthcheck_json_path.unlink()
+            except Exception as e:  # pylint: disable=broad-except
+                print(f"Exception deleting {healthcheck_json_path}: {e}")
+
+    def get_agent_performance(self) -> t.Dict:
+        """Return the agent activity"""
+
+        # Default values
+        agent_performance: t.Dict[str, t.Any] = {
+            "timestamp": None,
+            "metrics": [],
+            "last_activity": None,
+            "last_chat_message": None,
+        }
+
+        agent_performance_json_path = (
+            Path(
+                self.env_variables.get(
+                    AGENT_PERSISTENT_STORAGE_ENV_VAR, {"value": "."}
+                ).get("value", ".")
+            )
+            / "agent_performance.json"
+        )
+
+        if agent_performance_json_path.exists():
+            try:
+                with open(agent_performance_json_path, "r", encoding="utf-8") as f:
+                    data = json.load(f)
+                    if isinstance(data, dict):
+                        agent_performance.update(data)
+            except (json.JSONDecodeError, OSError) as e:
+                # Keep default values if file is invalid
+                print(
+                    f"Error reading file 'agent_performance.json': {e}"
+                )  # TODO Use logger
+
+        return dict(sorted(agent_performance.items()))
+
+    def update(
+        self,
+        service_template: ServiceTemplate,
+        allow_different_service_public_id: bool = False,
+        partial_update: bool = False,
+    ) -> None:
+        """Update service."""
+
+        target_hash = service_template.get("hash")
+        if target_hash:
+            target_service_public_id = Service.get_service_public_id(
+                target_hash, self.path
+            )
+
+            if not allow_different_service_public_id and (
+                self.service_public_id() != target_service_public_id
+            ):
+                raise ValueError(
+                    f"Trying to update a service with a different public id: {self.service_public_id()=} {self.hash=} {target_service_public_id=} {target_hash=}."
+                )
+
+        self.hash = service_template.get("hash", self.hash)
+
+        # hash_history - Only update if latest inserted hash is different
+        if self.hash_history[max(self.hash_history.keys())] != self.hash:
+            current_timestamp = int(time.time())
+            self.hash_history[current_timestamp] = self.hash
+
+        self.home_chain = service_template.get("home_chain", self.home_chain)
+        self.description = service_template.get("description", self.description)
+        self.name = service_template.get("name", self.name)
+
+        package_absolute_path = self.path / self.package_path
+        if package_absolute_path.exists():
+            shutil.rmtree(package_absolute_path)
+
+        package_absolute_path = Path(
+            IPFSTool().download(
+                hash_id=self.hash,
+                target_dir=self.path,
+            )
+        )
+        self.package_path = Path(package_absolute_path.name)
+
+        self.agent_release = service_template.get("agent_release", self.agent_release)
+
+        # env_variables
+        if partial_update:
+            for var, attrs in service_template.get("env_variables", {}).items():
+                self.env_variables.setdefault(var, {}).update(attrs)
+        else:
+            self.env_variables = service_template["env_variables"]
+
+        # chain_configs
+        # TODO support remove chains for non-partial updates
+        # TODO ensure all and only existing chains are passed for non-partial updates
+        ledger_configs = ServiceHelper(path=self.package_absolute_path).ledger_configs()
+        for chain, new_config in service_template.get("configurations", {}).items():
+            if chain in self.chain_configs:
+                # The template is providing a chain configuration that already
+                # exists in this service - update only the user parameters.
+                # This is to avoid losing on-chain data like safe, token, etc.
+                if partial_update:
+                    config = self.chain_configs[chain].chain_data.user_params.json
+                    config.update(new_config)
+                else:
+                    config = new_config
+
+                self.chain_configs[
+                    chain
+                ].chain_data.user_params = OnChainUserParams.from_json(
+                    config  # type: ignore
+                )
+            else:
+                # The template is providing a chain configuration that does
+                # not currently exist in this service - copy all config as
+                # when creating a new service.
+                ledger_config = ledger_configs[chain]
+                ledger_config.rpc = new_config["rpc"]
+
+                chain_data = OnChainData(
+                    instances=[],
+                    token=NON_EXISTENT_TOKEN,
+                    multisig=NON_EXISTENT_MULTISIG,
+                    user_params=OnChainUserParams.from_json(new_config),  # type: ignore
+                )
+
+                self.chain_configs[chain] = ChainConfig(
+                    ledger_config=ledger_config,
+                    chain_data=chain_data,
+                )
+
+        self.store()
+
+    def update_user_params_from_template(
+        self, service_template: ServiceTemplate
+    ) -> None:
+        """Update user params from template."""
+        for chain, config in service_template["configurations"].items():
+            self.chain_configs[
+                chain
+            ].chain_data.user_params = OnChainUserParams.from_json(
+                config  # type: ignore
+            )
+
+            self.chain_configs[chain].ledger_config.rpc = config["rpc"]
+
+        self.store()
+
+    def consume_env_variables(self) -> None:
+        """Consume (apply) environment variables.
+
+        Note that this method modifies os.environ. Consider if you need a backup of os.environ before using this method.
+        """
+        for env_var, attributes in self.env_variables.items():
+            os.environ[env_var] = str(attributes.get("value", ""))
+
+    def update_env_variables_values(
+        self, env_var_to_value: t.Dict[str, t.Any], except_if_undefined: bool = False
+    ) -> None:
+        """
+        Updates and stores the values of the env variables to override service.yaml on the deployment.
+
+        This method does not apply the variables to the environment. Use consume_env_variables to apply the
+        env variables.
+        """
+
+        updated = False
+        for var, value in env_var_to_value.items():
+            value_str = str(value)
+            attributes = self.env_variables.get(var)
+            if (
+                attributes
+                and self.env_variables[var]["provision_type"]
+                == ServiceEnvProvisionType.COMPUTED
+                and attributes["value"] != value_str
+            ):
+                attributes["value"] = value_str
+                updated = True
+            elif except_if_undefined:
+                raise ValueError(
+                    f"Trying to set value for an environment variable ({var}) not present on service configuration {self.service_config_id}."
+                )
+
+        if updated:
+            self.store()
+
+    def get_initial_funding_amounts(self) -> ChainAmounts:
+        """Get funding amounts as a dict structure."""
+        amounts = ChainAmounts()
+
+        for chain_str, chain_config in self.chain_configs.items():
+            fund_requirements = chain_config.chain_data.user_params.fund_requirements
+            service_safe = chain_config.chain_data.multisig
+
+            if service_safe is None or service_safe == ZERO_ADDRESS:
+                service_safe = SERVICE_SAFE_PLACEHOLDER
+
+            chain_amounts = amounts.setdefault(chain_str, {})
+            for asset, req in fund_requirements.items():
+                chain_amounts.setdefault(service_safe, {})[asset] = req.safe
+                for agent_address in self.agent_addresses:
+                    chain_amounts.setdefault(agent_address, {})[asset] = req.agent
+
+        return amounts
+
+    def get_balances(self) -> ChainAmounts:
+        """Get balances of the agent addresses and service safe."""
+        initial_funding_amounts = self.get_initial_funding_amounts()
+        return ChainAmounts(
+            {
+                chain_str: {
+                    address: {
+                        asset: get_asset_balance(
+                            ledger_api=get_default_ledger_api(
+                                Chain.from_string(chain_str)
+                            ),
+                            asset_address=asset,
+                            address=address,
+                            raise_on_invalid_address=False,
+                        )
+                        for asset in tokens
+                    }
+                    for address, tokens in addresses.items()
+                }
+                for chain_str, addresses in initial_funding_amounts.items()
+            }
+        )
+
+    def get_funding_requests(self) -> ChainAmounts:
+        """Get funding amounts requested by the agent."""
+        agent_response = {}
+        funding_requests = ChainAmounts()
+
+        if self.deployment.status != DeploymentStatus.DEPLOYED:
+            return funding_requests
+
+        try:
+            resp = requests.get(AGENT_FUNDS_STATUS_URL, timeout=10)
+            resp.raise_for_status()
+            agent_response = resp.json()
+        except Exception as e:  # pylint: disable=broad-except
+            logger.warning(
+                f"[FUNDING MANAGER] Cannot read url {AGENT_FUNDS_STATUS_URL}: {e}"
+            )
+
+        for chain_str, addresses in agent_response.items():
+            for address, assets in addresses.items():
+                if chain_str not in self.chain_configs:
+                    raise ValueError(
+                        f"Service {self.service_config_id} asked funding for an unknown chain {chain_str}."
+                    )
+
+                if (
+                    address not in self.agent_addresses
+                    and address != self.chain_configs[chain_str].chain_data.multisig
+                ):
+                    raise ValueError(
+                        f"Service {self.service_config_id} asked funding for an unknown address {address} on chain {chain_str}."
+                    )
+
+                funding_requests.setdefault(chain_str, {})
+                funding_requests[chain_str].setdefault(address, {})
+                for asset, amounts in assets.items():
+                    try:
+                        funding_requests[chain_str][address][asset] = int(
+                            amounts["deficit"]
+                        )
+                    except (ValueError, TypeError):
+                        logger.warning(
+                            f"[FUNDING MANAGER] Invalid funding amount {amounts['deficit']} for asset {asset} on chain {chain_str} for address {address}. Setting to 0."
+                        )
+                        funding_requests[chain_str][address][asset] = 0
+
+        return funding_requests
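`get_funding_requests()` polls the running agent at `AGENT_FUNDS_STATUS_URL` and keeps only the integer `deficit` per asset, after validating that every chain and address belongs to this service. A sketch of the response shape it accepts (the addresses and amounts are illustrative):

    # Example agent response, as parsed by get_funding_requests().
    agent_response = {
        "gnosis": {
            "0xAgentAddress...": {
                # asset address -> amounts; only "deficit" is consumed
                "0x0000000000000000000000000000000000000000": {"deficit": "1500000000000000000"},
            },
        },
    }
    # The resulting ChainAmounts is
    # {"gnosis": {"0xAgentAddress...": {"0x0000...0000": 1500000000000000000}}}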