eth-portfolio-temp 0.0.31.dev0__cp311-cp311-win_amd64.whl → 0.2.16__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (48)
  1. eth_portfolio/_argspec.cp311-win_amd64.pyd +0 -0
  2. eth_portfolio/_cache.py +2 -2
  3. eth_portfolio/_config.cp311-win_amd64.pyd +0 -0
  4. eth_portfolio/_db/utils.py +7 -9
  5. eth_portfolio/_decimal.py +11 -10
  6. eth_portfolio/_ledgers/address.py +1 -1
  7. eth_portfolio/_loaders/_nonce.cp311-win_amd64.pyd +0 -0
  8. eth_portfolio/_loaders/_nonce.py +4 -4
  9. eth_portfolio/_loaders/balances.cp311-win_amd64.pyd +0 -0
  10. eth_portfolio/_loaders/token_transfer.py +1 -1
  11. eth_portfolio/_loaders/transaction.py +1 -1
  12. eth_portfolio/_loaders/utils.cp311-win_amd64.pyd +0 -0
  13. eth_portfolio/_loaders/utils.py +1 -1
  14. eth_portfolio/_shitcoins.cp311-win_amd64.pyd +0 -0
  15. eth_portfolio/_shitcoins.py +56 -0
  16. eth_portfolio/_stableish.cp311-win_amd64.pyd +0 -0
  17. eth_portfolio/_stableish.py +6 -0
  18. eth_portfolio/_utils.py +12 -10
  19. eth_portfolio/_ydb/token_transfers.py +32 -23
  20. eth_portfolio/address.py +2 -1
  21. eth_portfolio/buckets.py +19 -10
  22. eth_portfolio/constants.cp311-win_amd64.pyd +0 -0
  23. eth_portfolio/constants.py +20 -1
  24. eth_portfolio/portfolio.py +1 -1
  25. eth_portfolio/protocols/lending/liquity.py +1 -1
  26. eth_portfolio/protocols/lending/maker.py +13 -14
  27. eth_portfolio/structs/structs.py +2 -2
  28. eth_portfolio/typing/__init__.py +6 -6
  29. eth_portfolio__mypyc.cp311-win_amd64.pyd +0 -0
  30. eth_portfolio_scripts/_portfolio.py +54 -41
  31. eth_portfolio_scripts/_utils.py +20 -6
  32. eth_portfolio_scripts/balances.cp311-win_amd64.pyd +0 -0
  33. eth_portfolio_scripts/balances.py +7 -4
  34. eth_portfolio_scripts/docker/__init__.cp311-win_amd64.pyd +0 -0
  35. eth_portfolio_scripts/docker/check.cp311-win_amd64.pyd +0 -0
  36. eth_portfolio_scripts/docker/check.py +28 -17
  37. eth_portfolio_scripts/docker/docker-compose.yaml +2 -2
  38. eth_portfolio_scripts/docker/docker_compose.cp311-win_amd64.pyd +0 -0
  39. eth_portfolio_scripts/docker/docker_compose.py +36 -18
  40. eth_portfolio_scripts/main.py +6 -0
  41. eth_portfolio_scripts/victoria/__init__.py +3 -0
  42. {eth_portfolio_temp-0.0.31.dev0.dist-info → eth_portfolio_temp-0.2.16.dist-info}/METADATA +8 -7
  43. eth_portfolio_temp-0.2.16.dist-info/RECORD +83 -0
  44. {eth_portfolio_temp-0.0.31.dev0.dist-info → eth_portfolio_temp-0.2.16.dist-info}/top_level.txt +1 -1
  45. 295eace8438df6ec133b__mypyc.cp311-win_amd64.pyd +0 -0
  46. eth_portfolio_temp-0.0.31.dev0.dist-info/RECORD +0 -83
  47. {eth_portfolio_temp-0.0.31.dev0.dist-info → eth_portfolio_temp-0.2.16.dist-info}/WHEEL +0 -0
  48. {eth_portfolio_temp-0.0.31.dev0.dist-info → eth_portfolio_temp-0.2.16.dist-info}/entry_points.txt +0 -0
eth_portfolio/constants.py

@@ -28,8 +28,19 @@ ETH_LIKE: Final = {
          "0x5e74C9036fb86BD7eCdcb084a0673EFc32eA31cb", # seth
          "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", # eth
          "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", # steth
+         "0x7f39C581F595B53c5cb19bD0b3f8dA6c935E2Ca0", # wstETH
          "0x9559Aaa82d9649C7A7b220E7c461d2E74c9a3593", # reth
          "0xE95A203B1a91a908F9B9CE46459d101078c2c3cb", # ankreth
+         "0x04C154b66CB340F3Ae24111CC767e0184Ed00Cc6", # pxETH
+         "0x856c4Efb76C1D1AE02e20CEB03A2A6a08b0b8dC3", # oETH
+         "0x0100546F2cD4C9D97f798fFC9755E47865FF7Ee6", # alETH
+         "0x1BED97CBC3c24A4fb5C069C6E311a967386131f7", # yETH
+         "0x24Ae2dA0f361AA4BE46b48EB19C91e02c5e4f27E", # mevETH
+         "0x5E8422345238F34275888049021821E8E08CAa1f", # frxETH
+         "0x821A278dFff762c76410264303F25bF42e195C0C", # pETH
+         "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", # cbETH
+         "0xCd5fE23C85820F7B72D0926FC9b05b43E359b7ee", # weETH
+         "0x7C07F7aBe10CE8e33DC6C5aD68FE033085256A84", # icETH
      },
  }.get(chain.id, set())

@@ -43,6 +54,8 @@ BTC_LIKE: Final = {
          "0x0316EB71485b0Ab14103307bf65a021042c6d380", # hbtc
          "0x5228a22e72ccC52d415EcFd199F99D0665E7733b", # pbtc
          "0x8dAEBADE922dF735c38C80C7eBD708Af50815fAa", # tbtc
+         "0x66eFF5221ca926636224650Fd3B9c497FF828F7D", # multiBTC
+         "0x657e8C867D8B37dCC18fA4Caead9C45EB088C642", # eBTC
      },
  }.get(chain.id, set())

@@ -53,11 +66,17 @@ INTL_STABLECOINS: Final = {
          "0xdB25f211AB05b1c97D595516F45794528a807ad8", # EURS
          "0x96E61422b6A9bA0e068B6c5ADd4fFaBC6a4aae27", # ibEUR
          "0x9fcf418B971134625CdF38448B949C8640971671", # EURN
-         "0x269895a3dF4D73b077Fc823dD6dA1B95f72Aaf9B", # sKRW
+         "0x39b8B6385416f4cA36a20319F70D28621895279D", # EURe
+         "0x1aBaEA1f7C830bD89Acc67eC4af516284b1bC33c", # EURC
          "0x3F1B0278A9ee595635B61817630cC19DE792f506", # sAUD
+         "0xFAFdF0C4c1CB09d430Bf88c75D88BB46DAe09967", # ibAUD
          "0x97fe22E7341a0Cd8Db6F6C021A24Dc8f4DAD855F", # sGBP
+         "0x69681f8fde45345C3870BCD5eaf4A05a60E7D227", # ibGBP
          "0xF6b1C627e95BFc3c1b4c9B825a032Ff0fBf3e07d", # sJPY
+         "0x5555f75e3d5278082200Fb451D1b6bA946D8e13b", # ibJPY
          "0x0F83287FF768D1c1e17a42F44d644D7F22e8ee1d", # sCHF
+         "0x1CC481cE2BD2EC7Bf67d1Be64d4878b16078F309", # ibCHF
          "0x269895a3dF4D73b077Fc823dD6dA1B95f72Aaf9B", # sKRW
+         "0x95dFDC8161832e4fF7816aC4B6367CE201538253", # ibKRW
      },
  }.get(chain.id, set())
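Note: these sets are gated on `chain.id`, so on chains without an entry they resolve to an empty set. A minimal sketch of how such membership checks are typically used (the `classify` helper and bucket names below are hypothetical; the real grouping logic lives in eth_portfolio/buckets.py, which also changed in this release):

from eth_portfolio.constants import BTC_LIKE, ETH_LIKE, INTL_STABLECOINS

def classify(token_address: str) -> str:
    # hypothetical bucket names, for illustration only
    if token_address in ETH_LIKE:
        return "ETH-like"
    if token_address in BTC_LIKE:
        return "BTC-like"
    if token_address in INTL_STABLECOINS:
        return "International stablecoin"
    return "Other"
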
eth_portfolio/portfolio.py

@@ -37,7 +37,7 @@ from eth_portfolio.typing import Addresses, PortfolioBalances
  logger = logging.getLogger(__name__)


- class PortfolioWallets(Iterable[PortfolioAddress], Dict[Address, PortfolioAddress]):  # type: ignore [misc]
+ class PortfolioWallets(Iterable[PortfolioAddress], Dict[Address, PortfolioAddress]):  # type: ignore [metaclass]
      """
      A container that holds all :class:`~eth_portfolio.address.PortfolioAddress` objects for a specific :class:`~eth_portfolio.Portfolio`.

eth_portfolio/protocols/lending/liquity.py

@@ -1,6 +1,6 @@
  from typing import Optional

- from async_lru import alru_cache
+ from faster_async_lru import alru_cache
  from y import Contract, Network, get_price
  from y._decorators import stuck_coro_debugger
  from y.constants import EEE_ADDRESS
eth_portfolio/protocols/lending/maker.py

@@ -1,10 +1,12 @@
  from asyncio import gather
- from typing import List, Optional
+ from typing import Final, List, Optional

  from a_sync import igather
- from async_lru import alru_cache
+ from brownie import ZERO_ADDRESS
  from dank_mids.exceptions import Revert
  from eth_typing import HexStr
+ from faster_async_lru import alru_cache
+ from faster_eth_abi import encode
  from y import Contract, Network, contract_creation_block_async, get_price
  from y._decorators import stuck_coro_debugger
  from y.constants import dai

@@ -14,18 +16,15 @@ from eth_portfolio._utils import Decimal
  from eth_portfolio.protocols.lending._base import LendingProtocolWithLockedCollateral
  from eth_portfolio.typing import Balance, TokenBalances

- try:
-     # this is only available in 4.0.0+
-     from eth_abi import encode

-     encode_bytes = lambda bytestring: encode(["bytes32"], [bytestring])
- except ImportError:
-     from eth_abi import encode_single
+ yfi: Final = "0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e"
+ dai: Contract
+ _1e18: Final = Decimal(10**18)
+ _1e45: Final = Decimal(10**45)

-     encode_bytes = lambda bytestring: encode_single("bytes32", bytestring)

- yfi = "0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e"
- dai: Contract
+ def encode_bytes(bytestring: str) -> bytes:
+     return encode(["bytes32"], [bytestring])


  class Maker(LendingProtocolWithLockedCollateral):
@@ -50,8 +49,8 @@ class Maker(LendingProtocolWithLockedCollateral):

          balances: TokenBalances = TokenBalances(block=block)
          for token, data in zip(gems, ink_data):
-             if ink := data.dict()["ink"]:
-                 balance = ink / Decimal(10**18)
+             if token != ZERO_ADDRESS and (ink := data.dict()["ink"]):
+                 balance = ink / _1e18
                  value = round(balance * Decimal(await get_price(token, block, sync=False)), 18)
                  balances[token] = Balance(balance, value, token=token, block=block)
          return balances

@@ -75,7 +74,7 @@ class Maker(LendingProtocolWithLockedCollateral):
          for urns, ilk_info in data:
              art = urns.dict()["art"]
              rate = ilk_info.dict()["rate"]
-             debt = art * rate / Decimal(1e45)
+             debt = art * rate / _1e45
              balances[dai.address] += Balance(debt, debt, token=dai, block=block)
          return balances

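Note: the new `_1e18` and `_1e45` constants mirror Maker's fixed-point conventions: `ink` and `art` are wads (18 decimals) and `rate` is a ray (27 decimals), so `art * rate` is a rad (45 decimals). A small worked example with made-up numbers (not taken from the package):

from decimal import Decimal

_1e45 = Decimal(10**45)

art = 1_500 * 10**18        # normalized debt, a wad (1e18)
rate = 1_020 * 10**24       # accumulated rate of 1.02, a ray (1e27)
debt = art * rate / _1e45   # Decimal("1530") -> 1,530 DAI owed
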
eth_portfolio/structs/structs.py

@@ -80,7 +80,7 @@ class _LedgerEntryBase(DictStruct, kw_only=True, frozen=True, omit_defaults=True
      The USD value of the cryptocurrency transferred in the {cls_name}, if price is known.
      """

-     def __init_subclass__(cls, **kwargs):
+     def __init_subclass__(cls, **kwargs: Any) -> None:
          super().__init_subclass__(**kwargs)

          # Replace {cls_name} in attribute-level docstrings

@@ -273,7 +273,7 @@ class _TransactionBase(
          return self.transaction.yParity

      @property
-     def __db_primary_key__(self):
+     def __db_primary_key__(self) -> Dict[str, tuple[int, Address] | int]:
          return {"from_address": (chain.id, self.from_address), "nonce": self.nonce}

eth_portfolio/typing/__init__.py

@@ -43,10 +43,10 @@ from typing import (
  )

  from checksum_dict import DefaultChecksumDict
- from eth_typing import BlockNumber
+ from eth_typing import BlockNumber, HexAddress
  from pandas import DataFrame, concat
  from typing_extensions import ParamSpec, Self
- from y import ERC20
+ from y import Contract, ERC20
  from y.datatypes import Address

  from eth_portfolio._decimal import Decimal

@@ -159,10 +159,10 @@ class TokenBalances(DefaultChecksumDict[Balance], _SummableNonNumericMixin):  #
              raise
          self[token.address] += balance

-     def __getitem__(self, key) -> Balance:
+     def __getitem__(self, key: HexAddress) -> Balance:
          return super().__getitem__(key) if key in self else Balance(token=key, block=self.block)

-     def __setitem__(self, key, value):
+     def __setitem__(self, key: HexAddress, value: Balance) -> None:
          """
          Sets the balance for a given token address.

@@ -393,7 +393,7 @@ class RemoteTokenBalances(DefaultDict[ProtocolLabel, TokenBalances], _SummableNonNumericMixin):
              )
              self[remote] += token_balances  # type: ignore [has-type]

-     def __setitem__(self, protocol: str, value: TokenBalances):
+     def __setitem__(self, protocol: str, value: TokenBalances) -> None:
          """
          Sets the token balances for a given protocol.

@@ -911,7 +911,7 @@ class PortfolioBalances(DefaultChecksumDict[WalletBalances], _SummableNonNumericMixin):
              )
              self[wallet] += balances

-     def __setitem__(self, key, value):
+     def __setitem__(self, key: HexAddress, value: WalletBalances) -> None:
          if not isinstance(value, WalletBalances):
              raise TypeError(
                  f"value must be a `WalletBalances` object. You passed {value}"
eth_portfolio__mypyc.cp311-win_amd64.pyd: binary file, no textual diff (the other compiled .pyd extension modules listed above likewise have no textual diff).
eth_portfolio_scripts/_portfolio.py

@@ -1,3 +1,4 @@
+ import asyncio
  from datetime import datetime, timezone
  from logging import getLogger
  from math import floor

@@ -5,10 +6,11 @@ from typing import Awaitable, Callable, Final, Iterator, List, Optional, Tuple

  import a_sync
  import eth_retry
+ import y
  from a_sync.functools import cached_property_unsafe as cached_property
  from eth_typing import BlockNumber, ChecksumAddress
  from msgspec import ValidationError, json
- from y import ERC20, Network, NonStandardERC20, get_block_at_timestamp
+ from y import ERC20, Network, NonStandardERC20
  from y.constants import CHAINID
  from y.time import NoBlockFound

@@ -33,6 +35,19 @@ logger: Final = getLogger("eth_portfolio")
  log_debug: Final = logger.debug
  log_error: Final = logger.error

+ _block_at_timestamp_semaphore: Final = a_sync.Semaphore(
+     50, name="eth-portfolio get_block_at_timestamp"
+ )
+
+
+ async def get_block_at_timestamp(dt: datetime) -> BlockNumber:
+     async with _block_at_timestamp_semaphore:
+         while True:
+             try:
+                 return await y.get_block_at_timestamp(dt, sync=False)
+             except NoBlockFound:
+                 await asyncio.sleep(10)
+

  class ExportablePortfolio(Portfolio):
      """Adds methods to export full portoflio data."""
@@ -40,8 +55,10 @@ class ExportablePortfolio(Portfolio):
      def __init__(
          self,
          addresses: Addresses,
+         *,
          start_block: int = 0,
          label: str = _DEFAULT_LABEL,
+         concurrency: int = 40,
          load_prices: bool = True,
          get_bucket: Callable[[ChecksumAddress], Awaitable[str]] = get_token_bucket,
          num_workers_transactions: int = 1000,

@@ -51,6 +68,7 @@
              addresses, start_block, label, load_prices, num_workers_transactions, asynchronous
          )
          self.get_bucket = get_bucket
+         self._semaphore = a_sync.Semaphore(concurrency)

      @cached_property
      def _data_queries(self) -> Tuple[str, str]:
@@ -71,53 +89,48 @@ class ExportablePortfolio(Portfolio):
              return True
          return False

-     async def export_snapshot(self, dt: datetime):
+     async def export_snapshot(self, dt: datetime) -> None:
          log_debug("checking data at %s for %s", dt, self.label)
          try:
-             if not await self.data_exists(dt, sync=False):
-                 while True:
-                     try:
-                         block = await get_block_at_timestamp(dt, sync=False)
-                     except NoBlockFound:
-                         pass
-                     else:
-                         break
-                 log_debug("block at %s: %s", dt, block)
-                 data = await self.get_data_for_export(block, dt, sync=False)
-                 await victoria.post_data(data)
+             if await self.data_exists(dt, sync=False):
+                 return
+             block = await get_block_at_timestamp(dt)
+             log_debug("block at %s: %s", dt, block)
+             data = await self.get_data_for_export(block, dt, sync=False)
+             await victoria.post_data(data)
          except Exception as e:
              log_error("Error processing %s:", dt, exc_info=True)

-     @a_sync.Semaphore(60)
      async def get_data_for_export(self, block: BlockNumber, ts: datetime) -> List[victoria.Metric]:
-         print(f"exporting {ts} for {self.label}")
-         start = datetime.now(tz=timezone.utc)
-
-         metrics_to_export = []
-         data: PortfolioBalances = await self.describe(block, sync=False)
-
-         for wallet, wallet_data in dict.items(data):
-             for section, section_data in wallet_data.items():
-                 if isinstance(section_data, TokenBalances):
-                     for token, bals in dict.items(section_data):
-                         metrics_to_export.extend(
-                             await self.__process_token(ts, section, wallet, token, bals)
-                         )
-                 elif isinstance(section_data, RemoteTokenBalances):
-                     if section == "external":
-                         section = "assets"
-                     for protocol, token_bals in section_data.items():
-                         for token, bals in dict.items(token_bals):
+         async with self._semaphore:
+             print(f"exporting {ts} for {self.label}")
+             start = datetime.now(tz=timezone.utc)
+
+             metrics_to_export = []
+             data: PortfolioBalances = await self.describe(block, sync=False)
+
+             for wallet, wallet_data in dict.items(data):
+                 for section, section_data in wallet_data.items():
+                     if isinstance(section_data, TokenBalances):
+                         for token, bals in dict.items(section_data):
                              metrics_to_export.extend(
-                                 await self.__process_token(
-                                     ts, section, wallet, token, bals, protocol=protocol
-                                 )
+                                 await self.__process_token(ts, section, wallet, token, bals)
                              )
-                 else:
-                     raise NotImplementedError()
+                     elif isinstance(section_data, RemoteTokenBalances):
+                         if section == "external":
+                             section = "assets"
+                         for protocol, token_bals in section_data.items():
+                             for token, bals in dict.items(token_bals):
+                                 metrics_to_export.extend(
+                                     await self.__process_token(
+                                         ts, section, wallet, token, bals, protocol=protocol
+                                     )
+                                 )
+                     else:
+                         raise NotImplementedError()

-         print(f"got data for {ts} in {datetime.now(tz=timezone.utc) - start}")
-         return metrics_to_export
+             print(f"got data for {ts} in {datetime.now(tz=timezone.utc) - start}")
+             return metrics_to_export

      def __get_data_exists_coros(self, dt: datetime) -> Iterator[str]:
          for query in self._data_queries:
@@ -131,7 +144,7 @@ class ExportablePortfolio(Portfolio):
          token: ChecksumAddress,
          bal: Balance,
          protocol: Optional[str] = None,
-     ):
+     ) -> Tuple[victoria.types.PrometheusItem, victoria.types.PrometheusItem]:
          # TODO wallet nicknames in grafana
          # wallet = KNOWN_ADDRESSES[wallet] if wallet in KNOWN_ADDRESSES else wallet
          if protocol is not None:

@@ -172,7 +185,7 @@
      )


- async def _get_symbol(token) -> str:
+ async def _get_symbol(token: str) -> str:
      if token == "ETH":
          return "ETH"
      try:
eth_portfolio_scripts/_utils.py

@@ -1,18 +1,18 @@
  import re
  from asyncio import Task, create_task, sleep
  from datetime import datetime, timedelta, timezone
- from typing import Any, AsyncGenerator, Dict, List, Optional
+ from typing import Any, AsyncGenerator, Dict, Final, List, Optional

  from brownie import chain


- def parse_timedelta(value: str) -> timedelta:
-     regex = re.compile(r"(\d+)([dhms]?)")
-     result = regex.findall(value)
+ timedelta_pattern: Final = re.compile(r"(\d+)([dhms]?)")
+

+ def parse_timedelta(value: str) -> timedelta:
      days, hours, minutes, seconds = 0, 0, 0, 0

-     for val, unit in result:
+     for val, unit in timedelta_pattern.findall(value):
          val = int(val)
          if unit == "d":
              days = val
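Note: hoisting the compiled pattern to a module-level `Final` does not change what `parse_timedelta` accepts. Assuming the `h`/`m`/`s` branches (not shown in this hunk) mirror the `d` branch above, usage looks like:

from datetime import timedelta

assert parse_timedelta("6h") == timedelta(hours=6)              # the CLI default interval
assert parse_timedelta("1d12h") == timedelta(days=1, hours=12)
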
@@ -58,10 +58,24 @@ async def aiter_timestamps(

      timestamp = start

+     timestamps = []
      while timestamp <= datetime.now(tz=timezone.utc):
-         yield timestamp
+         timestamps.append(timestamp)
          timestamp = timestamp + interval

+     # cycle between yielding earliest, latest, and middle from `timestamps` until complete
+     while timestamps:
+         # yield the earliest timestamp
+         yield timestamps.pop(0)
+         # yield the most recent timestamp if there is one
+         if timestamps:
+             yield timestamps.pop(-1)
+         # yield the most middle timestamp if there is one
+         if timestamps:
+             yield timestamps.pop(len(timestamps) // 2)
+
+     del timestamps
+
      while run_forever:
          while timestamp > datetime.now(tz=timezone.utc):
eth_portfolio_scripts/balances.py

@@ -26,13 +26,16 @@ async def export_balances(args: Namespace) -> None:

      interval = parse_timedelta(args.interval)
      portfolio = ExportablePortfolio(
-         args.wallet, label=args.label, start_block=args.first_tx_block, load_prices=False
+         args.wallet,
+         label=args.label,
+         start_block=args.first_tx_block,
+         concurrency=args.concurrency,
+         load_prices=False,
      )

      if export_start_block := args.export_start_block or args.first_tx_block:
-         start = datetime.fromtimestamp(
-             await dank_mids.eth.get_block_timestamp(args.export_start_block), tz=timezone.utc
-         )
+         start_ts = await dank_mids.eth.get_block_timestamp(export_start_block)
+         start = datetime.fromtimestamp(start_ts, tz=timezone.utc)
      else:
          start = None

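Note: taken together with the _portfolio.py changes above and the main.py changes below, the new `--concurrency` flag flows from the CLI into `ExportablePortfolio.__init__`, which stores `a_sync.Semaphore(concurrency)` on `self._semaphore`; `get_data_for_export` then acquires that semaphore, so at most `concurrency` snapshots (default 40) are described at once, replacing the old hard-coded `@a_sync.Semaphore(60)` decorator.
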
eth_portfolio_scripts/docker/check.py

@@ -1,5 +1,6 @@
  from functools import lru_cache
  from subprocess import CalledProcessError, check_output
+ from typing import List


  def check_docker() -> None:

@@ -9,48 +10,58 @@ def check_docker() -> None:
      Raises:
          RuntimeError: If docker is not installed.
      """
+     print(" 🔍 checking your computer for docker")
      try:
          check_output(["docker", "--version"])
-         print("docker found!")
      except (CalledProcessError, FileNotFoundError):
-         print("checking your computer for docker")
          raise RuntimeError(
              "Docker is not installed. You must install Docker before using dao-treasury."
          ) from None
+     else:
+         print(" ✔️ eth-portfolio found docker!")


- def check_docker_compose() -> None:
+ def check_docker_compose() -> List[str]:
      """
-     Check that docker-compose is installed on the user's system.
+     Check that either `docker-compose` or `docker compose` is installed on the user's system.
+
+     Returns:
+         A valid compose command.

      Raises:
          RuntimeError: If docker-compose is not installed.
      """
-     try:
-         check_output(["docker-compose", "--version"])
-         print("docker-compose found!")
-     except (CalledProcessError, FileNotFoundError):
-         print("checking your computer for docker-compose")
+     for cmd in ["docker-compose", "docker compose"]:
+         print(f" 🔍 checking your computer for {cmd}")
+
          try:
-             check_output(["docker", "compose", "--version"])
-             print("docker compose found!")
+             check_output([*cmd.split(" "), "--version"])
          except (CalledProcessError, FileNotFoundError):
-             print("docker-compose not found, checking your computer for docker compose")
-             raise RuntimeError(
-                 "Docker Compose is not installed. You must install Docker Compose before using dao-treasury."
-             ) from None
+             print(f" ❌ {cmd} not found")
+             continue
+         else:
+             print(f" ✔️ eth-portfolio found {cmd}!")
+             return cmd.split(" ")
+
+     raise RuntimeError(
+         "Docker Compose is not installed. You must install Docker Compose before using dao-treasury."
+     ) from None


  @lru_cache(maxsize=None)
- def check_system() -> None:
+ def check_system() -> List[str]:
      """
      Check that docker and docker-compose is installed on the user's system.

+     Returns:
+         A valid compose command.
+
      Raises:
          RuntimeError: If docker-compose is not installed.
      """
+     print("eth-portfolio is checking for the required docker dependencies...")
      check_docker()
-     check_docker_compose()
+     return check_docker_compose()


  __all__ = ["check_docker", "check_docker_compose", "check_system"]
eth_portfolio_scripts/docker/docker-compose.yaml

@@ -3,7 +3,7 @@ networks:

  services:
    grafana:
-     image: grafana/grafana:10.2.0
+     image: grafana/grafana:12.2.1
      ports:
        - 127.0.0.1:${GRAFANA_PORT:-3000}:3000
      environment:

@@ -47,7 +47,7 @@ services:
      restart: always

    victoria-metrics:
-     image: victoriametrics/victoria-metrics:v1.81.1
+     image: victoriametrics/victoria-metrics:v1.129.1
      volumes:
        - ~/.eth-portfolio/data/victoria/:/victoria-metrics-data
      command:
eth_portfolio_scripts/docker/docker_compose.py

@@ -3,7 +3,7 @@ from functools import wraps
  from importlib import resources
  from os import path
  from subprocess import CalledProcessError, check_output
- from typing import Callable, Final, Iterable, List, Tuple, TypeVar
+ from typing import Callable, Final, Iterable, List, Literal, Tuple, TypeVar

  from typing_extensions import ParamSpec

@@ -12,33 +12,33 @@ from eth_portfolio_scripts.docker.check import check_system

  logger: Final = logging.getLogger(__name__)

- compose_file: Final = str(
+ COMPOSE_FILE: Final = str(
      resources.files("eth_portfolio_scripts").joinpath("docker/docker-compose.yaml")
  )


  def up(*services: str) -> None:
+     """Build and start the specified docker-compose services."""
      build(*services)
-     print("starting the infra containers...")
+     _print_notice("starting", services)
      _exec_command(["up", "-d", *services])


  def down() -> None:
+     """Stop all of eth-portfolio's docker-compose services."""
      _exec_command(["down"])


  def build(*services: str) -> None:
-     print("building the grafana containers")
+     """Build the specified docker-compose services."""
+     _print_notice("building", services)
      _exec_command(["build", *services])


- def stop(container_name: str) -> None:
-     """
-     Stop the specified container if it is running.
-     Defaults to stopping the 'renderer' container.
-     """
-     print(f"stopping the {container_name} container...")
-     _exec_command(["stop", container_name])
+ def stop(*services: str) -> None:
+     """Stop the specified docker-compose services, if running."""
+     _print_notice("stopping", services)
+     _exec_command(["stop", *services])


  _P = ParamSpec("_P")

@@ -67,12 +67,30 @@ def ensure_containers(fn: Callable[_P, _T]) -> Callable[_P, _T]:
      return compose_wrap


- def _exec_command(command: List[str], *, compose_options: Tuple[str, ...] = ()) -> None:
-     check_system()
+ def _print_notice(
+     doing: Literal["building", "starting", "stopping"],
+     services: Tuple[str, ...],
+ ) -> None:
+     if len(services) == 1:
+         container = services[0]
+         print(f"{doing} the {container} container")
+     elif len(services) == 2:
+         first, second = services
+         print(f"{doing} the {first} and {second} containers")
+     else:
+         *all_but_last, last = services
+         print(f"{doing} the {', '.join(all_but_last)}, and {last} containers")
+
+
+ def _exec_command(
+     command: List[str],
+     *,
+     compose_file: str = COMPOSE_FILE,
+     compose_options: Tuple[str, ...] = (),
+ ) -> None:
+     compose = check_system()
+     full_command = [*compose, *compose_options, "-f", compose_file, *command]
      try:
-         check_output(["docker", "compose", *compose_options, "-f", compose_file, *command])
+         check_output(full_command)
      except (CalledProcessError, FileNotFoundError) as e:
-         try:
-             check_output(["docker-compose", *compose_options, "-f", compose_file, *command])
-         except (CalledProcessError, FileNotFoundError) as _e:
-             raise RuntimeError(f"Error occurred while running {' '.join(command)}: {_e}") from _e
+         raise RuntimeError(f"Error occurred while running `{' '.join(full_command)}`: {e}") from e
eth_portfolio_scripts/main.py

@@ -54,6 +54,12 @@ export_parser.add_argument(
      help="The time interval between datapoints. default: 6h",
      default="6h",
  )
+ export_parser.add_argument(
+     "--concurrency",
+     type=int,
+     help="The max number of historical blocks to export concurrently. default: 40",
+     default=40,
+ )
  export_parser.add_argument(
      "--first-tx-block",
      type=int,
eth_portfolio_scripts/victoria/__init__.py

@@ -68,3 +68,6 @@ async def post_data(metrics_to_export: List["Metric"]) -> None:
  def __set_session(sesh: ClientSession) -> None:
      global session
      session = sesh
+
+
+ __all__ = ["Metric", "get", "post_data"]