eth-portfolio 0.5.4__cp310-cp310-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eth-portfolio might be problematic. Click here for more details.

Files changed (83) hide show
  1. eth_portfolio/__init__.py +25 -0
  2. eth_portfolio/_argspec.cpython-310-darwin.so +0 -0
  3. eth_portfolio/_argspec.py +42 -0
  4. eth_portfolio/_cache.py +121 -0
  5. eth_portfolio/_config.cpython-310-darwin.so +0 -0
  6. eth_portfolio/_config.py +4 -0
  7. eth_portfolio/_db/__init__.py +0 -0
  8. eth_portfolio/_db/decorators.py +148 -0
  9. eth_portfolio/_db/entities.py +311 -0
  10. eth_portfolio/_db/utils.py +610 -0
  11. eth_portfolio/_decimal.py +156 -0
  12. eth_portfolio/_decorators.py +84 -0
  13. eth_portfolio/_exceptions.py +67 -0
  14. eth_portfolio/_ledgers/__init__.py +0 -0
  15. eth_portfolio/_ledgers/address.py +925 -0
  16. eth_portfolio/_ledgers/portfolio.py +328 -0
  17. eth_portfolio/_loaders/__init__.py +33 -0
  18. eth_portfolio/_loaders/_nonce.cpython-310-darwin.so +0 -0
  19. eth_portfolio/_loaders/_nonce.py +196 -0
  20. eth_portfolio/_loaders/balances.cpython-310-darwin.so +0 -0
  21. eth_portfolio/_loaders/balances.py +94 -0
  22. eth_portfolio/_loaders/token_transfer.py +217 -0
  23. eth_portfolio/_loaders/transaction.py +241 -0
  24. eth_portfolio/_loaders/utils.cpython-310-darwin.so +0 -0
  25. eth_portfolio/_loaders/utils.py +68 -0
  26. eth_portfolio/_shitcoins.cpython-310-darwin.so +0 -0
  27. eth_portfolio/_shitcoins.py +342 -0
  28. eth_portfolio/_stableish.cpython-310-darwin.so +0 -0
  29. eth_portfolio/_stableish.py +42 -0
  30. eth_portfolio/_submodules.py +73 -0
  31. eth_portfolio/_utils.py +225 -0
  32. eth_portfolio/_ydb/__init__.py +0 -0
  33. eth_portfolio/_ydb/token_transfers.py +146 -0
  34. eth_portfolio/address.py +397 -0
  35. eth_portfolio/buckets.py +212 -0
  36. eth_portfolio/constants.cpython-310-darwin.so +0 -0
  37. eth_portfolio/constants.py +87 -0
  38. eth_portfolio/portfolio.py +661 -0
  39. eth_portfolio/protocols/__init__.py +65 -0
  40. eth_portfolio/protocols/_base.py +107 -0
  41. eth_portfolio/protocols/convex.py +17 -0
  42. eth_portfolio/protocols/dsr.py +51 -0
  43. eth_portfolio/protocols/lending/README.md +6 -0
  44. eth_portfolio/protocols/lending/__init__.py +50 -0
  45. eth_portfolio/protocols/lending/_base.py +57 -0
  46. eth_portfolio/protocols/lending/compound.py +187 -0
  47. eth_portfolio/protocols/lending/liquity.py +110 -0
  48. eth_portfolio/protocols/lending/maker.py +111 -0
  49. eth_portfolio/protocols/lending/unit.py +46 -0
  50. eth_portfolio/protocols/liquity.py +16 -0
  51. eth_portfolio/py.typed +0 -0
  52. eth_portfolio/structs/__init__.py +43 -0
  53. eth_portfolio/structs/modified.py +69 -0
  54. eth_portfolio/structs/structs.py +626 -0
  55. eth_portfolio/typing/__init__.py +1419 -0
  56. eth_portfolio/typing/balance/single.py +176 -0
  57. eth_portfolio-0.5.4.dist-info/METADATA +26 -0
  58. eth_portfolio-0.5.4.dist-info/RECORD +83 -0
  59. eth_portfolio-0.5.4.dist-info/WHEEL +6 -0
  60. eth_portfolio-0.5.4.dist-info/entry_points.txt +2 -0
  61. eth_portfolio-0.5.4.dist-info/top_level.txt +3 -0
  62. eth_portfolio__mypyc.cpython-310-darwin.so +0 -0
  63. eth_portfolio_scripts/__init__.py +20 -0
  64. eth_portfolio_scripts/_args.py +26 -0
  65. eth_portfolio_scripts/_logging.py +15 -0
  66. eth_portfolio_scripts/_portfolio.py +209 -0
  67. eth_portfolio_scripts/_utils.py +106 -0
  68. eth_portfolio_scripts/balances.cpython-310-darwin.so +0 -0
  69. eth_portfolio_scripts/balances.py +57 -0
  70. eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
  71. eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
  72. eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
  73. eth_portfolio_scripts/docker/__init__.cpython-310-darwin.so +0 -0
  74. eth_portfolio_scripts/docker/__init__.py +16 -0
  75. eth_portfolio_scripts/docker/check.cpython-310-darwin.so +0 -0
  76. eth_portfolio_scripts/docker/check.py +67 -0
  77. eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
  78. eth_portfolio_scripts/docker/docker_compose.cpython-310-darwin.so +0 -0
  79. eth_portfolio_scripts/docker/docker_compose.py +100 -0
  80. eth_portfolio_scripts/main.py +119 -0
  81. eth_portfolio_scripts/py.typed +1 -0
  82. eth_portfolio_scripts/victoria/__init__.py +73 -0
  83. eth_portfolio_scripts/victoria/types.py +38 -0
@@ -0,0 +1,10 @@
1
apiVersion: 1

providers:
  - name: 'Portfolio dashboards'
    folder: 'Portfolio'
    type: file
    disableDeletion: true
    editable: false
    options:
      # This path is populated by the docker-compose volume that mounts
      # ./.grafana/ to /etc/grafana/provisioning/ in the grafana container.
      path: /etc/grafana/provisioning/dashboards/Portfolio
@@ -0,0 +1,11 @@
1
apiVersion: 1

datasources:
  - name: 'PROMETHEUS'
    type: 'prometheus'
    access: 'proxy'
    # Grafana datasource provisioning keys are camelCase; the previous
    # snake_case keys (org_id, is_default) are silently ignored by Grafana,
    # which left the datasource attached to the default org only implicitly
    # and NOT marked as the default datasource.
    orgId: 1
    # VictoriaMetrics exposes a Prometheus-compatible query API
    url: 'http://victoria-metrics:8428'
    isDefault: true
    version: 1
    editable: false
@@ -0,0 +1,16 @@
1
"""Public surface of eth-portfolio's docker orchestration helpers."""

from eth_portfolio_scripts.docker.check import check_docker, check_docker_compose, check_system
from eth_portfolio_scripts.docker.docker_compose import build, down, ensure_containers, up, stop

__all__ = [
    # commands
    "build",
    "up",
    "down",
    "stop",
    # decorators
    "ensure_containers",
    # checks
    "check_docker",
    "check_docker_compose",
    "check_system",
]
@@ -0,0 +1,67 @@
1
+ from functools import lru_cache
2
+ from subprocess import CalledProcessError, check_output
3
+ from typing import List
4
+
5
+
6
def check_docker() -> None:
    """
    Check that docker is installed on the user's system.

    Raises:
        RuntimeError: If docker is not installed.
    """
    print(" 🔍 checking your computer for docker")
    try:
        # FileNotFoundError: the `docker` binary is not on PATH at all.
        # CalledProcessError: the binary exists but `--version` exited nonzero.
        check_output(["docker", "--version"])
    except (CalledProcessError, FileNotFoundError):
        # BUG FIX: the message previously said "dao-treasury" — a copy-paste
        # leftover from another project; this package is eth-portfolio.
        raise RuntimeError(
            "Docker is not installed. You must install Docker before using eth-portfolio."
        ) from None
    else:
        print(" ✔️ eth-portfolio found docker!")
22
+
23
+
24
def check_docker_compose() -> list[str]:
    """
    Check that either `docker-compose` or `docker compose` is installed on the user's system.

    Returns:
        A valid compose command, split into argv parts
        (``["docker-compose"]`` or ``["docker", "compose"]``).

    Raises:
        RuntimeError: If docker-compose is not installed.
    """
    for cmd in ["docker-compose", "docker compose"]:
        print(f" 🔍 checking your computer for {cmd}")

        try:
            check_output([*cmd.split(" "), "--version"])
        except (CalledProcessError, FileNotFoundError):
            # this candidate is unavailable; fall through to the next one
            print(f" ❌ {cmd} not found")
            continue
        else:
            print(f" ✔️ eth-portfolio found {cmd}!")
            return cmd.split(" ")

    # BUG FIX: the message previously said "dao-treasury" — a copy-paste
    # leftover from another project; this package is eth-portfolio.
    raise RuntimeError(
        "Docker Compose is not installed. You must install Docker Compose before using eth-portfolio."
    ) from None
49
+
50
+
51
@lru_cache(maxsize=None)  # cache so the slow subprocess checks run only once
def check_system() -> list[str]:
    """
    Check that docker and docker-compose is installed on the user's system.

    Returns:
        A valid compose command.

    Raises:
        RuntimeError: If docker-compose is not installed.
    """
    print("eth-portfolio is checking for the required docker dependencies...")
    check_docker()
    return check_docker_compose()
65
+
66
+
67
+ __all__ = ["check_docker", "check_docker_compose", "check_system"]
@@ -0,0 +1,61 @@
1
networks:
  eth_portfolio:

services:
  grafana:
    image: grafana/grafana:12.3.1
    ports:
      - 127.0.0.1:${GRAFANA_PORT:-3000}:3000
    environment:
      - GF_SECURITY_ADMIN_USER=${GF_SECURITY_ADMIN_USER:-admin}
      - GF_SECURITY_ADMIN_PASSWORD=${GF_SECURITY_ADMIN_PASSWORD:-admin}
      - GF_AUTH_ANONYMOUS_ENABLED=true
      # BUG FIX: this path is case-sensitive inside the Linux container and
      # previously used lowercase "portfolio/"; the dashboards are provisioned
      # under .grafana/dashboards/Portfolio/ (capital P), so the default home
      # dashboard could not be resolved.
      - GF_DASHBOARDS_DEFAULT_HOME_DASHBOARD_PATH=/etc/grafana/provisioning/dashboards/Portfolio/Balances.json
      - GF_SERVER_ROOT_URL
      - GF_RENDERING_SERVER_URL=http://renderer:8091/render
      - GF_RENDERING_CALLBACK_URL=http://grafana:3000/
      - GF_LOG_FILTERS=rendering:debug
      #- GF_INSTALL_PLUGINS=volkovlabs-variable-panel
      #command: >
      # sh -c "grafana-cli plugins install frser-sqlite-datasource && /run.sh"
    volumes:
      # provides both the dashboards and the datasource provisioning configs
      - ./.grafana/:/etc/grafana/provisioning/
    networks:
      - eth_portfolio
    restart: always

  renderer:
    platform: linux/amd64
    image: grafana/grafana-image-renderer:latest
    ports:
      - 127.0.0.1:${RENDERER_PORT:-8091}:8091
    environment:
      - ENABLE_METRICS=true
      - HTTP_PORT=8091
    networks:
      - eth_portfolio
    restart: always

  vmagent:
    image: victoriametrics/vmagent:heads-public-single-node-0-g52eb9c99e
    command:
      - "-remoteWrite.url=http://victoria-metrics:8428/api/v1/write"
    depends_on:
      - victoria-metrics
    networks:
      - eth_portfolio
    restart: always

  victoria-metrics:
    image: victoriametrics/victoria-metrics:v1.132.0
    volumes:
      # persist the time-series database on the host between restarts
      - ~/.eth-portfolio/data/victoria/:/victoria-metrics-data
    command:
      - '-memory.allowedBytes=3GB'
      - "-retentionPeriod=10y"
      - "-search.disableAutoCacheReset=true"
    ports:
      - 127.0.0.1:${VICTORIA_PORT:-8428}:8428
    networks:
      - eth_portfolio
    restart: always
@@ -0,0 +1,100 @@
1
+ import logging
2
+ from functools import wraps
3
+ from importlib import resources
4
+ from os import path
5
+ from subprocess import CalledProcessError, check_output
6
+ from typing import Final, List, Literal, Tuple, TypeVar
7
+ from collections.abc import Callable
8
+ from collections.abc import Iterable
9
+
10
+ from typing_extensions import ParamSpec
11
+
12
+ from eth_portfolio_scripts.docker.check import check_system
13
+
14
+
15
logger: Final = logging.getLogger(__name__)

# Absolute path to the docker-compose file shipped inside the package,
# resolved via importlib.resources so it works regardless of install location.
COMPOSE_FILE: Final = str(
    resources.files("eth_portfolio_scripts").joinpath("docker/docker-compose.yaml")
)
20
+
21
+
22
def up(*services: str) -> None:
    """Build and start the specified docker-compose services.

    With no arguments, every service defined in the compose file is started.
    """
    build(*services)
    _print_notice("starting", services)
    # -d: run detached so this call returns once the containers are up
    _exec_command(["up", "-d", *services])
27
+
28
+
29
def down() -> None:
    """Stop all of eth-portfolio's docker-compose services.

    Note: `down` also removes the containers and network, unlike `stop`.
    """
    _exec_command(["down"])
32
+
33
+
34
def build(*services: str) -> None:
    """Build the specified docker-compose services.

    With no arguments, every service defined in the compose file is built.
    """
    _print_notice("building", services)
    _exec_command(["build", *services])
38
+
39
+
40
def stop(*services: str) -> None:
    """Stop the specified docker-compose services, if running.

    Unlike `down`, the stopped containers are kept and can be restarted.
    """
    _print_notice("stopping", services)
    _exec_command(["stop", *services])
44
+
45
+
46
+ _P = ParamSpec("_P")
47
+ _T = TypeVar("_T")
48
+
49
+
50
def ensure_containers(fn: Callable[_P, _T]) -> Callable[_P, _T]:
    """Decorator that brings the backend containers up before awaiting ``fn``.

    NOTE(review): ``fn`` is awaited inside the wrapper, so it must be an async
    callable; the ``Callable[_P, _T]`` annotation understates this (it should
    be ``Callable[_P, Awaitable[_T]]``) — confirm before tightening.
    """

    @wraps(fn)
    async def compose_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
        # register shutdown sequence
        # TODO: argument to leave them up
        # NOTE: do we need both this and the finally?
        # signal.signal(signal.SIGINT, down)

        # start Grafana containers
        up()

        try:
            # attempt to run `fn`
            return await fn(*args, **kwargs)
        finally:
            # stop and remove containers
            # down()
            pass

    return compose_wrap
70
+
71
+
72
+ def _print_notice(
73
+ doing: Literal["building", "starting", "stopping"],
74
+ services: tuple[str, ...],
75
+ ) -> None:
76
+ if len(services) == 0:
77
+ print(f"{doing} the backend containers")
78
+ elif len(services) == 1:
79
+ container = services[0]
80
+ print(f"{doing} the {container} container")
81
+ elif len(services) == 2:
82
+ first, second = services
83
+ print(f"{doing} the {first} and {second} containers")
84
+ else:
85
+ *all_but_last, last = services
86
+ print(f"{doing} the {', '.join(all_but_last)}, and {last} containers")
87
+
88
+
89
def _exec_command(
    command: list[str],
    *,
    compose_file: str = COMPOSE_FILE,
    compose_options: tuple[str, ...] = (),
) -> None:
    """Run a docker-compose subcommand against the packaged compose file.

    Args:
        command: The compose subcommand and its args, e.g. ``["up", "-d"]``.
        compose_file: Path to the compose file; defaults to the bundled one.
        compose_options: Extra options inserted before ``-f``.

    Raises:
        RuntimeError: If the compose command fails or cannot be found.
    """
    # check_system() returns the working compose invocation for this machine,
    # e.g. ["docker-compose"] or ["docker", "compose"] (cached after first call).
    compose = check_system()
    full_command = [*compose, *compose_options, "-f", compose_file, *command]
    try:
        check_output(full_command)
    except (CalledProcessError, FileNotFoundError) as e:
        raise RuntimeError(f"Error occurred while running `{' '.join(full_command)}`: {e}") from e
@@ -0,0 +1,119 @@
1
import asyncio
from argparse import ArgumentParser
from os import environ

import brownie

from eth_portfolio_scripts import docker, logger
from eth_portfolio_scripts._args import add_infra_port_args
from eth_portfolio_scripts.balances import export_balances


parser = ArgumentParser(description="eth-portfolio")

subparsers = parser.add_subparsers(title="Commands", dest="command", required=True)

# `export` runs one of the exporters against the user's portfolio
export_parser = subparsers.add_parser("export", help="Export a specific dataset for your portfolio")
export_parser.add_argument("target", help="Choose an exporter to run")
add_infra_port_args(export_parser)
export_parser.set_defaults(func=export_balances)

# `infra` manages the docker containers that back the exporters
infra_parser = subparsers.add_parser(
    "infra", help="Start the docker containers required to run any eth-portfolio service"
)
infra_parser.add_argument("cmd", help="What do you want to do?")
add_infra_port_args(infra_parser)
infra_parser.add_argument(
    "--start-renderer",
    action="store_true",
    help="If set, starts the renderer container in addition to the default containers. By default, only grafana, victoria-metrics, and vmagent are started.",
)

export_parser.add_argument(
    "--wallet",
    type=str,
    help="The address of a wallet to export. You can pass multiple, ie. `--wallet 0x123 0x234 0x345`",
    required=True,
    nargs="+",
)
export_parser.add_argument(
    "--network",
    type=str,
    help="The brownie network identifier for the rpc you wish to use. default: mainnet",
    default="mainnet",
)
# NOTE(review): the help text says the default is "MyPortfolio" but the actual
# default is "My Portfolio" — confirm which is intended.
export_parser.add_argument(
    "--label",
    type=str,
    help='The label for this portfolio, if you want one. Defaults to "MyPortfolio".',
    default="My Portfolio",
)
export_parser.add_argument(
    "--interval",
    type=str,
    help="The time interval between datapoints. default: 6h",
    default="6h",
)
export_parser.add_argument(
    "--concurrency",
    type=int,
    help="The max number of historical blocks to export concurrently. default: 30",
    default=30,
)
export_parser.add_argument(
    "--first-tx-block",
    type=int,
    help=(
        "The block of your portfolio's first transaction, if known. "
        "This value, if provided, allows us to speed up processing of your data by limiting the block range we need to query. "
        "If not provided, the whole blockchain will be scanned."
    ),
    default=0,
)
export_parser.add_argument(
    "--export-start-block",
    type=int,
    help="The first block in the range you wish to export.",
    default=0,
)
export_parser.add_argument(
    "--daemon",
    action="store_true",
    help="TODO: If True, starts a daemon process instead of running in your terminal. Not currently supported.",
)

# NOTE(review): args are parsed at import time, so importing this module
# requires valid CLI args — confirm it is only ever executed as a script.
args = parser.parse_args()

# only the `export` subparser defines --network
if hasattr(args, "network"):
    environ["BROWNIE_NETWORK_ID"] = args.network

# expose the chosen ports to docker-compose via the environment
environ["GRAFANA_PORT"] = str(args.grafana_port)
environ["RENDERER_PORT"] = str(args.renderer_port)
environ["VICTORIA_PORT"] = str(args.victoria_port)
93
+
94
+
95
# TODO: run forever arg
def main():
    """Entry point for the eth-portfolio CLI: dispatch the parsed command.

    Raises:
        ValueError: If the subcommand argument is not recognized.
    """
    command = args.command
    if command == "infra":
        if args.cmd == "start":
            # Start the backend containers
            if getattr(args, "start_renderer", False):
                # no args -> start every service in the compose file,
                # including the image renderer
                docker.up()
            else:
                docker.up("grafana", "victoria-metrics", "vmagent")
        elif args.cmd == "stop":
            docker.down()
        else:
            # BUG FIX: this previously referenced `args.target`, which the
            # infra subparser never defines — an invalid infra subcommand
            # raised AttributeError instead of the intended ValueError.
            raise ValueError(f"{args.cmd} is not a valid command")

    else:
        # The user's command is `export`
        if args.target == "balances":
            asyncio.run(args.func(args))
        else:
            raise ValueError(f"{args.target} is not a valid command")
116
+
117
+
118
if __name__ == "__main__":
    # Run through brownie so the script executes inside an initialized brownie
    # project/network context instead of calling main() directly.
    # NOTE(review): main() is not invoked here — presumably brownie re-imports
    # this file and triggers it; confirm the intended entry flow.
    brownie.project.run(__file__)
@@ -0,0 +1 @@
1
+
@@ -0,0 +1,73 @@
1
+ from gzip import compress
2
+ from logging import getLogger
3
+ from os import environ
4
+ from typing import Final, List
5
+
6
+ import a_sync
7
+ from aiohttp import ClientError, ClientSession, ServerDisconnectedError
8
+ from msgspec import json
9
+
10
+ from eth_portfolio_scripts.victoria import types
11
+ from eth_portfolio_scripts.victoria.types import Metric
12
+
13
+
14
# NOTE(review): docker-compose publishes victoria-metrics on port 8428 by
# default; this fallback of 8430 looks inconsistent — confirm whether a proxy
# listens on 8430 or whether this should be 8428.
BASE_URL: Final = environ.get("VM_URL", "http://127.0.0.1:8430")

# this will be populated later
session: ClientSession = None  # type: ignore [assignment]

logger: Final = getLogger("eth_portfolio.victoria")

# bind msgspec's JSON helpers once at module scope
encode: Final = json.encode
decode: Final = json.decode
+
24
+
25
class VictoriaMetricsError(ValueError):
    """Raised for unexpected or error responses from the VictoriaMetrics backend."""
26
+
27
+
28
def get_session() -> ClientSession:
    """Return the module-level ClientSession, creating it lazily on first use."""
    sesh = session
    if sesh is None:
        # raise_for_status=True surfaces HTTP error codes as exceptions
        sesh = ClientSession(BASE_URL, raise_for_status=True)
        __set_session(sesh)
    return sesh
34
+
35
+
36
async def get(url: str) -> bytes:
    """GET ``url`` and return the raw response body.

    Retries forever on ServerDisconnectedError; any other client error
    propagates to the caller.
    """
    session = get_session()
    while True:
        try:
            # "Connection: close" avoids reusing a connection the server
            # may have already dropped
            async with session.get(url=url, headers={"Connection": "close"}) as response:
                return await response.read()
        except ServerDisconnectedError:
            continue
44
+
45
+
46
@a_sync.Semaphore(2)  # allow at most 2 concurrent POSTs to VictoriaMetrics
async def post_data(metrics_to_export: list["Metric"]) -> None:
    """Post all metrics at once.

    The metrics are serialized as newline-delimited JSON, gzip-compressed,
    and sent to VictoriaMetrics' import endpoint. ClientErrors are retried
    up to 10 times before the last one is re-raised.
    """
    data = compress(b"\n".join(encode(metric) for metric in metrics_to_export))
    attempts = 0
    session = get_session()
    while True:
        try:
            async with session.post(
                "/api/v1/import",
                headers={"Connection": "close", "Content-Encoding": "gzip"},
                data=data,
            ):
                logger.debug(f"posted {len(data)} datas")
                return
        except ClientError as e:
            attempts += 1
            logger.debug("You had a ClientError: %s", e)
            if attempts >= 10:
                raise e
66
+
67
+
68
def __set_session(sesh: ClientSession) -> None:
    """Store ``sesh`` as the module-level session singleton."""
    global session
    session = sesh
71
+
72
+
73
+ __all__ = ["Metric", "get", "post_data"]
@@ -0,0 +1,38 @@
1
+ """
2
+ These 2 helpers enable us to decode only the relevant data in the json response and discard the rest
3
+ They're in a separate file so mypyc doesn't try to compile them
4
+ """
5
+
6
+ from typing import List, Tuple, TypedDict, final
7
+
8
+ from eth_typing import ChecksumAddress
9
+ from msgspec import Raw, Struct
10
+
11
+
12
@final
class Metric(TypedDict):
    """Label set identifying a single exported portfolio time series."""

    param: str
    wallet: ChecksumAddress
    token_address: ChecksumAddress
    token: str
    bucket: str
    network: str
    # presumably the Prometheus-style metric name label — confirm against
    # the VictoriaMetrics import payloads this is encoded into
    __name__: str
21
+
22
+
23
@final
class PrometheusItem(TypedDict):
    """One series from a Prometheus-style query response: labels plus samples."""

    metric: Metric
    # values[i] is the sample taken at timestamps[i]
    values: list[float]
    timestamps: list[float]
28
+
29
+
30
@final
class Data(Struct):
    """Partial decode target for the ``data`` field of a query response."""

    # Raw defers decoding so callers can parse only the entries they need
    result: tuple[Raw, ...]
33
+
34
+
35
@final
class Response(Struct):
    """Top-level envelope of a VictoriaMetrics/Prometheus query response."""

    status: str
    data: Data