olas-operate-middleware 0.9.0__py3-none-any.whl → 0.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {olas_operate_middleware-0.9.0.dist-info → olas_operate_middleware-0.10.1.dist-info}/METADATA +1 -1
  2. {olas_operate_middleware-0.9.0.dist-info → olas_operate_middleware-0.10.1.dist-info}/RECORD +30 -30
  3. operate/bridge/bridge_manager.py +2 -3
  4. operate/bridge/providers/native_bridge_provider.py +1 -1
  5. operate/bridge/providers/provider.py +2 -3
  6. operate/bridge/providers/relay_provider.py +9 -1
  7. operate/cli.py +123 -43
  8. operate/constants.py +5 -0
  9. operate/keys.py +26 -14
  10. operate/ledger/profiles.py +1 -3
  11. operate/migration.py +288 -21
  12. operate/operate_types.py +9 -6
  13. operate/quickstart/analyse_logs.py +1 -4
  14. operate/quickstart/claim_staking_rewards.py +0 -3
  15. operate/quickstart/reset_configs.py +0 -3
  16. operate/quickstart/reset_password.py +0 -3
  17. operate/quickstart/reset_staking.py +2 -4
  18. operate/quickstart/run_service.py +3 -5
  19. operate/quickstart/stop_service.py +0 -3
  20. operate/quickstart/terminate_on_chain_service.py +0 -3
  21. operate/services/deployment_runner.py +170 -38
  22. operate/services/health_checker.py +3 -2
  23. operate/services/manage.py +90 -123
  24. operate/services/service.py +15 -225
  25. operate/utils/__init__.py +44 -0
  26. operate/utils/gnosis.py +22 -12
  27. operate/wallet/master.py +16 -20
  28. {olas_operate_middleware-0.9.0.dist-info → olas_operate_middleware-0.10.1.dist-info}/LICENSE +0 -0
  29. {olas_operate_middleware-0.9.0.dist-info → olas_operate_middleware-0.10.1.dist-info}/WHEEL +0 -0
  30. {olas_operate_middleware-0.9.0.dist-info → olas_operate_middleware-0.10.1.dist-info}/entry_points.txt +0 -0
@@ -63,8 +63,8 @@ from autonomy.deploy.generators.docker_compose.base import DockerComposeGenerato
 from autonomy.deploy.generators.kubernetes.base import KubernetesGenerator
 from docker import from_env
 
-from operate.constants import CONFIG_JSON, DEPLOYMENT_DIR, DEPLOYMENT_JSON, ZERO_ADDRESS
-from operate.keys import Keys
+from operate.constants import CONFIG_JSON, DEPLOYMENT_DIR, DEPLOYMENT_JSON
+from operate.keys import KeysManager
 from operate.operate_http.exceptions import NotAllowed
 from operate.operate_types import (
     Chain,
@@ -92,10 +92,10 @@ from operate.utils.ssl import create_ssl_certificate
 SAFE_CONTRACT_ADDRESS = "safe_contract_address"
 ALL_PARTICIPANTS = "all_participants"
 CONSENSUS_THRESHOLD = "consensus_threshold"
-SERVICE_CONFIG_VERSION = 7
+SERVICE_CONFIG_VERSION = 8
 SERVICE_CONFIG_PREFIX = "sc-"
 
-NON_EXISTENT_MULTISIG = "0xm"
+NON_EXISTENT_MULTISIG = None
 NON_EXISTENT_TOKEN = -1
 
 DEFAULT_TRADER_ENV_VARS = {
@@ -446,7 +446,7 @@ class Deployment(LocalResource):
         builder = ServiceBuilder.from_dir(
             path=service.package_absolute_path,
             keys_file=self.path / DEFAULT_KEYS_FILE,
-            number_of_agents=len(service.keys),
+            number_of_agents=len(service.agent_addresses),
         )
         builder.deplopyment_type = KubernetesGenerator.deployment_type
         (
@@ -489,12 +489,8 @@ class Deployment(LocalResource):
         keys_file.write_text(
             json.dumps(
                 [
-                    {
-                        "address": key.address,
-                        "private_key": key.private_key,
-                        "ledger": key.ledger.name.lower(),
-                    }
-                    for key in service.keys
+                    KeysManager().get(address).json
+                    for address in service.agent_addresses
                 ],
                 indent=4,
             ),
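The generated agent keys file keeps its old shape; only the source of the entries changes, from key objects embedded in the service config to lookups against the new `KeysManager`. A sketch of the assumed per-entry shape (placeholder values, inferred from the removed inline dict rather than a verified API):

# Assumed shape of each entry written to the keys file (values are placeholders).
example_entry = {
    "address": "0x0000000000000000000000000000000000000001",
    "private_key": "<redacted>",
    "ledger": "ethereum",
}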
@@ -504,7 +500,7 @@ class Deployment(LocalResource):
         builder = ServiceBuilder.from_dir(
             path=service.package_absolute_path,
             keys_file=keys_file,
-            number_of_agents=len(service.keys),
+            number_of_agents=len(service.agent_addresses),
         )
         builder.deplopyment_type = DockerComposeGenerator.deployment_type
         builder.try_update_abci_connection_params()
@@ -616,12 +612,8 @@ class Deployment(LocalResource):
         keys_file.write_text(
             json.dumps(
                 [
-                    {
-                        "address": key.address,
-                        "private_key": key.private_key,
-                        "ledger": key.ledger.name.lower(),
-                    }
-                    for key in service.keys
+                    KeysManager().get(address).json
+                    for address in service.agent_addresses
                 ],
                 indent=4,
             ),
@@ -631,7 +623,7 @@ class Deployment(LocalResource):
         builder = ServiceBuilder.from_dir(
             path=service.package_absolute_path,
             keys_file=keys_file,
-            number_of_agents=len(service.keys),
+            number_of_agents=len(service.agent_addresses),
         )
         builder.deplopyment_type = HostDeploymentGenerator.deployment_type
         builder.try_update_abci_connection_params()
@@ -779,7 +771,7 @@ class Service(LocalResource):
     service_config_id: str
     hash: str
     hash_history: t.Dict[int, str]
-    keys: Keys
+    agent_addresses: t.List[str]
     home_chain: str
     chain_configs: ChainConfigs
     description: str
@@ -796,7 +788,7 @@ class Service(LocalResource):
     _file = CONFIG_JSON
 
     @staticmethod
-    def _determine_agent_id(service_name: str) -> int:
+    def determine_agent_id(service_name: str) -> int:
        """Determine the appropriate agent ID based on service name."""
        service_name_lower = service_name.lower()
        if "mech" in service_name_lower:
@@ -807,208 +799,6 @@ class Service(LocalResource):
             return AGENT_TYPE_IDS["modius"]
         return AGENT_TYPE_IDS["trader"]
 
-    @classmethod
-    def migrate_format(cls, path: Path) -> bool:  # pylint: disable=too-many-statements
-        """Migrate the JSON file format if needed."""
-
-        if not path.is_dir():
-            return False
-
-        if not path.name.startswith(SERVICE_CONFIG_PREFIX) and not path.name.startswith(
-            "bafybei"
-        ):
-            return False
-
-        if path.name.startswith("bafybei"):
-            backup_name = f"backup_{int(time.time())}_{path.name}"
-            backup_path = path.parent / backup_name
-            shutil.copytree(path, backup_path)
-            deployment_path = backup_path / "deployment"
-            if deployment_path.is_dir():
-                shutil.rmtree(deployment_path)
-
-        with open(path / Service._file, "r", encoding="utf-8") as file:
-            data = json.load(file)
-
-        version = data.get("version", 0)
-        if version > SERVICE_CONFIG_VERSION:
-            raise RuntimeError(
-                f"Service configuration in {path} has version {version}, which means it was created with a newer version of olas-operate-middleware. Only configuration versions <= {SERVICE_CONFIG_VERSION} are supported by this version of olas-operate-middleware."
-            )
-
-        # Complete missing env vars for trader
-        if "trader" in data["name"].lower():
-            data.setdefault("env_variables", {})
-
-            for key, value in DEFAULT_TRADER_ENV_VARS.items():
-                if key not in data["env_variables"]:
-                    data["env_variables"][key] = value
-
-            with open(path / Service._file, "w", encoding="utf-8") as file:
-                json.dump(data, file, indent=2)
-
-        if version == SERVICE_CONFIG_VERSION:
-            return False
-
-        # Migration steps for older versions
-        if version == 0:
-            new_data = {
-                "version": 2,
-                "hash": data.get("hash"),
-                "keys": data.get("keys"),
-                "home_chain_id": "100",  # This is the default value for version 2 - do not change, will be corrected below
-                "chain_configs": {
-                    "100": {  # This is the default value for version 2 - do not change, will be corrected below
-                        "ledger_config": {
-                            "rpc": data.get("ledger_config", {}).get("rpc"),
-                            "type": data.get("ledger_config", {}).get("type"),
-                            "chain": data.get("ledger_config", {}).get("chain"),
-                        },
-                        "chain_data": {
-                            "instances": data.get("chain_data", {}).get(
-                                "instances", []
-                            ),
-                            "token": data.get("chain_data", {}).get("token"),
-                            "multisig": data.get("chain_data", {}).get("multisig"),
-                            "staked": data.get("chain_data", {}).get("staked", False),
-                            "on_chain_state": data.get("chain_data", {}).get(
-                                "on_chain_state", 3
-                            ),
-                            "user_params": {
-                                "staking_program_id": "pearl_alpha",
-                                "nft": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("nft"),
-                                "threshold": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("threshold"),
-                                "use_staking": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("use_staking"),
-                                "cost_of_bond": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("cost_of_bond"),
-                                "fund_requirements": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("fund_requirements", {}),
-                                "agent_id": data.get("chain_data", {})
-                                .get("user_params", {})
-                                .get("agent_id", "14"),
-                            },
-                        },
-                    }
-                },
-                "service_path": data.get("service_path", ""),
-                "name": data.get("name", ""),
-            }
-            data = new_data
-
-        if version < 4:
-            # Add missing fields introduced in later versions, if necessary.
-            for _, chain_data in data.get("chain_configs", {}).items():
-                chain_data.setdefault("chain_data", {}).setdefault(
-                    "user_params", {}
-                ).setdefault("use_mech_marketplace", False)
-                service_name = data.get("name", "")
-                agent_id = cls._determine_agent_id(service_name)
-                chain_data.setdefault("chain_data", {}).setdefault("user_params", {})[
-                    "agent_id"
-                ] = agent_id
-
-            data["description"] = data.setdefault("description", data.get("name"))
-            data["hash_history"] = data.setdefault(
-                "hash_history", {int(time.time()): data["hash"]}
-            )
-
-            if "service_config_id" not in data:
-                service_config_id = Service.get_new_service_config_id(path)
-                new_path = path.parent / service_config_id
-                data["service_config_id"] = service_config_id
-                path = path.rename(new_path)
-
-            old_to_new_ledgers = ["ethereum", "solana"]
-            for key_data in data["keys"]:
-                key_data["ledger"] = old_to_new_ledgers[key_data["ledger"]]
-
-            old_to_new_chains = [
-                "ethereum",
-                "goerli",
-                "gnosis",
-                "solana",
-                "optimism",
-                "base",
-                "mode",
-            ]
-            new_chain_configs = {}
-            for chain_id, chain_data in data["chain_configs"].items():
-                chain_data["ledger_config"]["chain"] = old_to_new_chains[
-                    chain_data["ledger_config"]["chain"]
-                ]
-                del chain_data["ledger_config"]["type"]
-                new_chain_configs[Chain.from_id(int(chain_id)).value] = chain_data  # type: ignore
-
-            data["chain_configs"] = new_chain_configs
-            data["home_chain"] = data.setdefault("home_chain", Chain.from_id(int(data.get("home_chain_id", "100"))).value)  # type: ignore
-            del data["home_chain_id"]
-
-            if "env_variables" not in data:
-                if data["name"] == "valory/trader_pearl":
-                    data["env_variables"] = DEFAULT_TRADER_ENV_VARS
-                else:
-                    data["env_variables"] = {}
-
-        if version < 5:
-            new_chain_configs = {}
-            for chain, chain_data in data["chain_configs"].items():
-                fund_requirements = chain_data["chain_data"]["user_params"][
-                    "fund_requirements"
-                ]
-                if ZERO_ADDRESS not in fund_requirements:
-                    chain_data["chain_data"]["user_params"]["fund_requirements"] = {
-                        ZERO_ADDRESS: fund_requirements
-                    }
-
-                new_chain_configs[chain] = chain_data  # type: ignore
-            data["chain_configs"] = new_chain_configs
-
-        if version < 7:
-            if data["home_chain"] == "optimistic":
-                data["home_chain"] = Chain.OPTIMISM.value
-
-            if "optimistic" in data["chain_configs"]:
-                data["chain_configs"]["optimism"] = data["chain_configs"].pop(
-                    "optimistic"
-                )
-
-            for _, chain_config in data["chain_configs"].items():
-                if chain_config["ledger_config"]["chain"] == "optimistic":
-                    chain_config["ledger_config"]["chain"] = Chain.OPTIMISM.value
-
-        data["version"] = SERVICE_CONFIG_VERSION
-
-        # Redownload service path
-        if "service_path" in data:
-            package_absolute_path = path / Path(data["service_path"]).name
-            data.pop("service_path")
-        else:
-            package_absolute_path = path / data["package_path"]
-
-        if package_absolute_path.exists() and package_absolute_path.is_dir():
-            shutil.rmtree(package_absolute_path)
-
-        package_absolute_path = Path(
-            IPFSTool().download(
-                hash_id=data["hash"],
-                target_dir=path,
-            )
-        )
-        data["package_path"] = str(package_absolute_path.name)
-
-        with open(path / Service._file, "w", encoding="utf-8") as file:
-            json.dump(data, file, indent=2)
-
-        return True
-
     @classmethod
     def load(cls, path: Path) -> "Service":
         """Load a service"""
@@ -1063,7 +853,7 @@ class Service(LocalResource):
 
     @staticmethod
     def new(  # pylint: disable=too-many-locals
-        keys: Keys,
+        agent_addresses: t.List[str],
         service_template: ServiceTemplate,
         storage: Path,
     ) -> "Service":
@@ -1105,7 +895,7 @@ class Service(LocalResource):
             name=service_template["name"],
             description=service_template["description"],
             hash=service_template["hash"],
-            keys=keys,
+            agent_addresses=agent_addresses,
             home_chain=service_template["home_chain"],
             hash_history={current_timestamp: service_template["hash"]},
             chain_configs=chain_configs,
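Callers now create a service from plain agent addresses; the private keys stay behind the KeysManager abstraction. A hypothetical call site under that assumption (address and storage path are placeholders):

# Hypothetical sketch: services are now created from pre-generated agent addresses.
service = Service.new(
    agent_addresses=["0x0000000000000000000000000000000000000001"],  # placeholder
    service_template=service_template,  # a ServiceTemplate as used above
    storage=Path(".operate") / "services",  # assumed storage location
)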
operate/utils/__init__.py CHANGED
@@ -19,10 +19,54 @@
 
 """Helper utilities."""
 
+import functools
 import shutil
 import time
 import typing as t
 from pathlib import Path
+from threading import Lock
+
+
+class SingletonMeta(type):
+    """A metaclass for creating thread-safe singleton classes."""
+
+    _instances: t.Dict[t.Type, t.Any] = {}
+    _lock: Lock = Lock()
+    _class_locks: t.Dict[t.Type, Lock] = {}
+
+    def __new__(
+        cls, name: str, bases: t.Tuple[type, ...], dct: t.Dict[str, t.Any]
+    ) -> t.Type:
+        """Create a new class with thread-safe methods."""
+        # Wrap all callable methods (except special methods) with thread safety
+        for key, value in list(dct.items()):
+            if callable(value) and not key.startswith("__"):
+                dct[key] = cls._make_thread_safe(value)
+
+        new_class = super().__new__(cls, name, bases, dct)
+        cls._class_locks[new_class] = Lock()
+        return new_class
+
+    @staticmethod
+    def _make_thread_safe(func: t.Callable) -> t.Callable:
+        """Wrap a function to make it thread-safe."""
+
+        @functools.wraps(func)
+        def wrapper(self: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
+            class_lock = SingletonMeta._class_locks.get(type(self))
+            if class_lock:
+                with class_lock:
+                    return func(self, *args, **kwargs)
+            return func(self, *args, **kwargs)
+
+        return wrapper
+
+    def __call__(cls, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Override the __call__ method to control instance creation."""
+        with cls._lock:
+            if cls not in cls._instances:
+                cls._instances[cls] = super().__call__(*args, **kwargs)
+            return cls._instances[cls]
 
 
 def create_backup(path: Path) -> Path:
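The metaclass serializes access to every non-dunder method through a per-class lock and guarantees a single shared instance, which is presumably how KeysManager() can be constructed ad hoc in service.py above and still refer to one object. A minimal, self-contained illustration (the Counter class is hypothetical, not part of the package):

# Minimal sketch of how a class opts in to SingletonMeta (hypothetical example class).
from operate.utils import SingletonMeta


class Counter(metaclass=SingletonMeta):
    """Toy class: every instantiation returns the same, lock-protected object."""

    def __init__(self) -> None:
        self.value = 0

    def increment(self) -> int:  # wrapped with the per-class lock by the metaclass
        self.value += 1
        return self.value


assert Counter() is Counter()  # one shared instance
Counter().increment()          # safe to call from multiple threads

Note that only callables defined directly on the class (and not starting with "__") are wrapped, so inherited and special methods are not serialized by the lock.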
operate/utils/gnosis.py CHANGED
@@ -42,7 +42,7 @@ from operate.constants import (
 from operate.operate_types import Chain
 
 
-logger = setup_logger(name="operate.manager")
+logger = setup_logger(name="operate.utils.gnosis")
 MAX_UINT256 = 2**256 - 1
 SENTINEL_OWNERS = "0x0000000000000000000000000000000000000001"
 
@@ -490,7 +490,7 @@ def drain_eoa(
     crypto: Crypto,
     withdrawal_address: str,
     chain_id: int,
-) -> str:
+) -> t.Optional[str]:
     """Drain all the native tokens from the crypto wallet."""
     tx_helper = TxSettler(
         ledger_api=ledger_api,
@@ -516,7 +516,7 @@ def drain_eoa(
         )
         tx = ledger_api.update_with_gas_estimate(
             transaction=tx,
-            raise_on_try=True,
+            raise_on_try=False,
         )
 
         chain_fee = tx["gas"] * tx["maxFeePerGas"]
@@ -530,7 +530,6 @@ def drain_eoa(
 
         tx["value"] = ledger_api.get_balance(crypto.address) - chain_fee
         if tx["value"] <= 0:
-            logger.warning(f"No balance to drain from wallet: {crypto.address}")
             raise ChainInteractionError(
                 f"No balance to drain from wallet: {crypto.address}"
             )
@@ -542,14 +541,25 @@ def drain_eoa(
         return tx
 
     setattr(tx_helper, "build", _build_tx)  # noqa: B010
-    tx_receipt = tx_helper.transact(
-        method=lambda: {},
-        contract="",
-        kwargs={},
-        dry_run=False,
-    )
-    tx_hash = tx_receipt.get("transactionHash", "").hex()
-    return tx_hash
+    try:
+        tx_receipt = tx_helper.transact(
+            method=lambda: {},
+            contract="",
+            kwargs={},
+            dry_run=False,
+        )
+    except ChainInteractionError as e:
+        if "No balance to drain from wallet" in str(e):
+            logger.warning(f"Failed to drain wallet {crypto.address} with error: {e}.")
+            return None
+
+        raise e
+
+    tx_hash = tx_receipt.get("transactionHash", None)
+    if tx_hash is not None:
+        return tx_hash.hex()
+
+    return None
 
 
 def get_asset_balance(
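drain_eoa now reports an empty wallet by returning None instead of letting the exception propagate, so call sites have to tolerate the optional result. A hypothetical caller under that assumption (parameter order assumed from the hunk above; chain id 100 is Gnosis):

# Hypothetical call site: drain_eoa may now return None (nothing to drain,
# or no transaction hash available in the receipt).
tx_hash = drain_eoa(
    ledger_api=ledger_api,
    crypto=crypto,
    withdrawal_address=withdrawal_address,
    chain_id=100,  # e.g. Gnosis
)
if tx_hash is None:
    logger.info(f"Nothing drained from {crypto.address}")
else:
    logger.info(f"Drained EOA, tx hash: {tx_hash}")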
operate/wallet/master.py CHANGED
@@ -29,7 +29,6 @@ from pathlib import Path
 
 from aea.crypto.base import Crypto, LedgerApi
 from aea.crypto.registries import make_ledger_api
-from aea.helpers.logging import setup_logger
 from aea_ledger_ethereum import DEFAULT_GAS_PRICE_STRATEGIES, EIP1559, GWEI, to_wei
 from aea_ledger_ethereum.ethereum import EthereumApi, EthereumCrypto
 from autonomy.chain.base import registry_contracts
@@ -516,6 +515,9 @@ class EthereumMasterWallet(MasterWallet):
                 asset_address=asset,
                 address=self.safes[chain] if from_safe else self.crypto.address,
             )
+            if balance <= 0:
+                continue
+
             self.transfer_asset(
                 to=withdrawal_address,
                 amount=balance,
@@ -808,6 +810,17 @@ class EthereumMasterWallet(MasterWallet):
             safes[chain] = address
         data["safes"] = safes
 
+        if "optimistic" in data.get("safes", {}):
+            data["safes"]["optimism"] = data["safes"].pop("optimistic")
+            migrated = True
+
+        if "optimistic" in data.get("safe_chains"):
+            data["safe_chains"] = [
+                "optimism" if chain == "optimistic" else chain
+                for chain in data["safe_chains"]
+            ]
+            migrated = True
+
         with open(wallet_path, "w", encoding="utf-8") as file:
             json.dump(data, file, indent=2)
 
@@ -825,13 +838,13 @@ class MasterWalletManager:
     def __init__(
         self,
         path: Path,
+        logger: logging.Logger,
         password: t.Optional[str] = None,
-        logger: t.Optional[logging.Logger] = None,
     ) -> None:
         """Initialize master wallet manager."""
         self.path = path
+        self.logger = logger
         self._password = password
-        self.logger = logger or setup_logger(name="operate.master_wallet_manager")
 
     @property
     def json(self) -> t.List[t.Dict]:
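MasterWalletManager no longer falls back to its own logger; the caller must pass one in, and since the parameter now precedes password, keyword arguments are the safe way to construct it. A hypothetical construction under that assumption (the wallet directory path is a placeholder):

# Hypothetical construction: the logger is now a required argument.
import logging
from pathlib import Path

from operate.wallet.master import MasterWalletManager

manager = MasterWalletManager(
    path=Path(".operate") / "wallets",  # assumed data directory
    logger=logging.getLogger("operate"),
    password=None,  # optional, as before
)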
@@ -927,20 +940,3 @@ class MasterWalletManager:
             if not self.exists(ledger_type=ledger_type):
                 continue
             yield LEDGER_TYPE_TO_WALLET_CLASS[ledger_type].load(path=self.path)
-
-    def migrate_wallet_configs(self) -> None:
-        """Migrate old wallet config formats to new ones, if applies."""
-
-        print(self.path)
-
-        for ledger_type in LedgerType:
-            if not self.exists(ledger_type=ledger_type):
-                continue
-
-            wallet_class = LEDGER_TYPE_TO_WALLET_CLASS.get(ledger_type)
-            if wallet_class is None:
-                continue
-
-            migrated = wallet_class.migrate_format(path=self.path)
-            if migrated:
-                self.logger.info(f"Wallet {wallet_class} has been migrated.")