eth-portfolio 0.5.8__cp311-cp311-musllinux_1_2_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eth-portfolio might be problematic. Click here for more details.

Files changed (83) hide show
  1. eth_portfolio/__init__.py +24 -0
  2. eth_portfolio/_argspec.cpython-311-x86_64-linux-musl.so +0 -0
  3. eth_portfolio/_argspec.py +43 -0
  4. eth_portfolio/_cache.py +119 -0
  5. eth_portfolio/_config.cpython-311-x86_64-linux-musl.so +0 -0
  6. eth_portfolio/_config.py +4 -0
  7. eth_portfolio/_db/__init__.py +0 -0
  8. eth_portfolio/_db/decorators.py +147 -0
  9. eth_portfolio/_db/entities.py +311 -0
  10. eth_portfolio/_db/utils.py +619 -0
  11. eth_portfolio/_decimal.py +154 -0
  12. eth_portfolio/_decorators.py +84 -0
  13. eth_portfolio/_exceptions.py +65 -0
  14. eth_portfolio/_ledgers/__init__.py +0 -0
  15. eth_portfolio/_ledgers/address.py +917 -0
  16. eth_portfolio/_ledgers/portfolio.py +328 -0
  17. eth_portfolio/_loaders/__init__.py +33 -0
  18. eth_portfolio/_loaders/_nonce.cpython-311-x86_64-linux-musl.so +0 -0
  19. eth_portfolio/_loaders/_nonce.py +193 -0
  20. eth_portfolio/_loaders/balances.cpython-311-x86_64-linux-musl.so +0 -0
  21. eth_portfolio/_loaders/balances.py +95 -0
  22. eth_portfolio/_loaders/token_transfer.py +215 -0
  23. eth_portfolio/_loaders/transaction.py +240 -0
  24. eth_portfolio/_loaders/utils.cpython-311-x86_64-linux-musl.so +0 -0
  25. eth_portfolio/_loaders/utils.py +67 -0
  26. eth_portfolio/_shitcoins.cpython-311-x86_64-linux-musl.so +0 -0
  27. eth_portfolio/_shitcoins.py +342 -0
  28. eth_portfolio/_stableish.cpython-311-x86_64-linux-musl.so +0 -0
  29. eth_portfolio/_stableish.py +42 -0
  30. eth_portfolio/_submodules.py +72 -0
  31. eth_portfolio/_utils.py +229 -0
  32. eth_portfolio/_ydb/__init__.py +0 -0
  33. eth_portfolio/_ydb/token_transfers.py +144 -0
  34. eth_portfolio/address.py +396 -0
  35. eth_portfolio/buckets.py +212 -0
  36. eth_portfolio/constants.cpython-311-x86_64-linux-musl.so +0 -0
  37. eth_portfolio/constants.py +87 -0
  38. eth_portfolio/portfolio.py +669 -0
  39. eth_portfolio/protocols/__init__.py +64 -0
  40. eth_portfolio/protocols/_base.py +107 -0
  41. eth_portfolio/protocols/convex.py +17 -0
  42. eth_portfolio/protocols/dsr.py +50 -0
  43. eth_portfolio/protocols/lending/README.md +6 -0
  44. eth_portfolio/protocols/lending/__init__.py +50 -0
  45. eth_portfolio/protocols/lending/_base.py +56 -0
  46. eth_portfolio/protocols/lending/compound.py +186 -0
  47. eth_portfolio/protocols/lending/liquity.py +108 -0
  48. eth_portfolio/protocols/lending/maker.py +110 -0
  49. eth_portfolio/protocols/lending/unit.py +44 -0
  50. eth_portfolio/protocols/liquity.py +17 -0
  51. eth_portfolio/py.typed +0 -0
  52. eth_portfolio/structs/__init__.py +43 -0
  53. eth_portfolio/structs/modified.py +69 -0
  54. eth_portfolio/structs/structs.py +628 -0
  55. eth_portfolio/typing/__init__.py +1418 -0
  56. eth_portfolio/typing/balance/single.py +176 -0
  57. eth_portfolio-0.5.8.dist-info/METADATA +28 -0
  58. eth_portfolio-0.5.8.dist-info/RECORD +83 -0
  59. eth_portfolio-0.5.8.dist-info/WHEEL +5 -0
  60. eth_portfolio-0.5.8.dist-info/entry_points.txt +2 -0
  61. eth_portfolio-0.5.8.dist-info/top_level.txt +3 -0
  62. eth_portfolio__mypyc.cpython-311-x86_64-linux-musl.so +0 -0
  63. eth_portfolio_scripts/__init__.py +17 -0
  64. eth_portfolio_scripts/_args.py +26 -0
  65. eth_portfolio_scripts/_logging.py +14 -0
  66. eth_portfolio_scripts/_portfolio.py +209 -0
  67. eth_portfolio_scripts/_utils.py +106 -0
  68. eth_portfolio_scripts/balances.cpython-311-x86_64-linux-musl.so +0 -0
  69. eth_portfolio_scripts/balances.py +56 -0
  70. eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
  71. eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
  72. eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
  73. eth_portfolio_scripts/docker/__init__.cpython-311-x86_64-linux-musl.so +0 -0
  74. eth_portfolio_scripts/docker/__init__.py +16 -0
  75. eth_portfolio_scripts/docker/check.cpython-311-x86_64-linux-musl.so +0 -0
  76. eth_portfolio_scripts/docker/check.py +66 -0
  77. eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
  78. eth_portfolio_scripts/docker/docker_compose.cpython-311-x86_64-linux-musl.so +0 -0
  79. eth_portfolio_scripts/docker/docker_compose.py +97 -0
  80. eth_portfolio_scripts/main.py +118 -0
  81. eth_portfolio_scripts/py.typed +1 -0
  82. eth_portfolio_scripts/victoria/__init__.py +72 -0
  83. eth_portfolio_scripts/victoria/types.py +38 -0
@@ -0,0 +1,24 @@
1
import warnings

# Silence a noisy UserWarning about decimals-precision changes; it fires during
# normal eth-portfolio operation. Presumably emitted by a pricing dependency —
# TODO confirm the source module.
warnings.filterwarnings(
    "ignore",
    message="Changing decimals precision could have unintended side effects!",
    category=UserWarning,
)


import a_sync._smart

# Install a_sync's smart task factory so tasks created by this library get
# smart-task semantics (per the a_sync API; exact behavior lives in a_sync).
a_sync._smart.set_smart_task_factory()


# make sure we init the extended db before we init ypm somewhere
from eth_portfolio._db import utils
from eth_portfolio._shitcoins import SHITCOINS
from eth_portfolio.portfolio import Portfolio, portfolio

# Public API of the package.
__all__ = [
    "Portfolio",
    "portfolio",
    "SHITCOINS",
]
@@ -0,0 +1,43 @@
1
+ from collections.abc import Callable
2
+ from inspect import getfullargspec
3
+ from typing import Any
4
+
5
+ # WIP:
6
+
7
+
8
def get_args_type(sample: Callable) -> tuple[type, ...]:
    """Return the annotated types of ``sample``'s positional parameters.

    Parameters without an annotation fall back to :data:`typing.Any`;
    a ``self`` parameter is skipped.

    Args:
        sample: The callable to inspect.

    Returns:
        A tuple containing one type per (non-``self``) positional parameter.
    """
    argspec = getfullargspec(sample)
    # FIX: the original returned ``tuple(*args.values())``, which unpacked the
    # annotation types as separate *arguments* to ``tuple`` and raised a
    # TypeError for any function that did not have exactly one iterable
    # annotation. We want a tuple OF the types themselves.
    return tuple(
        argspec.annotations.get(arg_name, Any)
        for arg_name in argspec.args
        if arg_name != "self"
    )
16
+
17
+
18
def get_kwargs_type(sample: Callable) -> tuple[type, ...]:
    """Return a parameterized ``tuple[...]`` of the types of ``sample``'s defaulted args.

    Positional parameters without defaults are ignored; every parameter that
    has a default is treated as a "kwarg" here. Unannotated parameters fall
    back to :data:`typing.Any`. With zero or more than three kwargs, plain
    :data:`typing.Any` is returned (matching the original WIP fallback).

    Args:
        sample: The callable to inspect.
    """
    argspec = getfullargspec(sample)
    # Parameters without defaults come first in ``argspec.args``; everything
    # after them carries a default value.
    num_without_defaults = len(argspec.args) - len(argspec.defaults or [])
    kwarg_names = argspec.args[num_without_defaults:]
    # FIX: two bugs in the original. ``list(*kwargs.values())`` unpacked the
    # annotation types as *arguments* to ``list`` (TypeError unless exactly one
    # iterable annotation), and the branch below tested the count of
    # NON-default args instead of the count of defaulted (keyword) args.
    _kwarg_types: list[type[object]] = [
        argspec.annotations.get(kwarg_name, Any) for kwarg_name in kwarg_names
    ]
    num_kwargs = len(_kwarg_types)
    if num_kwargs == 1:
        return tuple[_kwarg_types[0]]  # type: ignore [valid-type,return-value]
    elif num_kwargs == 2:
        return tuple[_kwarg_types[0], _kwarg_types[1]]  # type: ignore [misc,return-value]
    elif num_kwargs == 3:
        return tuple[_kwarg_types[0], _kwarg_types[1], _kwarg_types[2]]  # type: ignore [misc,return-value]
    else:
        return Any  # type: ignore [misc,return-value]
35
+
36
+
37
def get_return_type(sample: Callable) -> type:
    """Return the annotated return type of ``sample``, or ``Any`` if absent."""
    annotations = getfullargspec(sample).annotations
    return annotations.get("return", Any)
40
+
41
+
42
def get_types(sample: Callable) -> tuple[type, type, type]:
    """Return ``(args_type, kwargs_type, return_type)`` extracted from ``sample``."""
    extractors = (get_args_type, get_kwargs_type, get_return_type)
    return tuple(extract(sample) for extract in extractors)  # type: ignore [return-value]
@@ -0,0 +1,119 @@
1
+ import functools
2
+ import inspect
3
+ from asyncio import AbstractEventLoop, PriorityQueue, Task, current_task, get_event_loop
4
+ from collections.abc import Callable
5
+ from concurrent.futures import Executor
6
+ from hashlib import md5
7
+ from logging import getLogger
8
+ from os import makedirs
9
+ from os.path import exists, join
10
+ from pickle import dumps, load, loads
11
+ from random import random
12
+ from typing import Any, Final, NoReturn
13
+
14
+ from a_sync import PruningThreadPoolExecutor
15
+ from a_sync._typing import P, T
16
+ from a_sync.asyncio import create_task
17
+ from aiofiles import open as _aio_open
18
+ from brownie import chain
19
+
20
# On-disk cache root, namespaced by chain id so caches for different networks
# never collide. Relative to the current working directory.
BASE_PATH: Final = f"./cache/{chain.id}/"
_THREAD_NAME_PREFIX: Final = "eth-portfolio-cache-decorator"
# Shared thread pool used only for `os.path.exists` checks on cache files.
_EXISTS_EXECUTOR: Final = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-exists")
23
+
24
+
25
def cache_to_disk(fn: Callable[P, T]) -> Callable[P, T]:
    # sourcery skip: use-contextlib-suppress
    """Decorator that persists ``fn``'s results to disk, keyed by its arguments.

    Supports both sync and async callables. Results are pickled to
    ``BASE_PATH/<module path>/<fn name>/<md5(args)>.json`` — NOTE the payload
    is pickle data despite the ``.json`` extension. For async functions, cache
    reads are funneled through a queue of worker tasks plus thread-pool
    executors so file I/O never blocks the event loop; writes are best-effort
    and a full disk is deliberately tolerated.
    """
    name = fn.__name__
    cache_path_for_fn = f"{BASE_PATH}{fn.__module__.replace('.', '/')}/{name}"
    logger = getLogger(f"eth_portfolio.cache_to_disk.{name}")

    def get_cache_file_path(args: tuple[Any, ...], kwargs: dict[str, Any]) -> str:
        # Create a unique filename based on the function arguments
        key = md5(dumps((args, sorted(kwargs.items())))).hexdigest()
        return join(cache_path_for_fn, f"{key}.json")

    # Dedicated pool per wrapped function so slow cache writes don't contend
    # with other functions' I/O.
    write_executor = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-write")

    makedirs(cache_path_for_fn, exist_ok=True)

    if inspect.iscoroutinefunction(fn):
        read_executor = PruningThreadPoolExecutor(
            8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-read"
        )

        # Pending cache-read requests: (priority, future, path, args, kwargs).
        queue: PriorityQueue = PriorityQueue()

        async def cache_deco_worker_coro(func: Callable[..., Any]) -> NoReturn:
            # Long-lived worker: pull read requests off the queue forever and
            # resolve each future with the unpickled file contents, or with the
            # read/unpickle error (consumed by the waiter below).
            try:
                while True:
                    _, fut, cache_path, args, kwargs = await queue.get()
                    try:
                        async with _aio_open(cache_path, "rb", executor=read_executor) as f:
                            fut.set_result(loads(await f.read()))
                    except Exception as e:
                        fut.set_exception(e)
            except Exception as e:
                # Should be unreachable: per-item errors are routed to futures
                # above, so anything here means the worker loop itself broke.
                logger.error("%s for %s is broken!!!", current_task(), func)
                logger.exception(e)
                raise

        loop: AbstractEventLoop | None = None  # lazily captured on first cache hit
        workers: list[Task[NoReturn]] = []  # spawned lazily on first cache hit

        @functools.wraps(fn)
        async def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            if await _EXISTS_EXECUTOR.run(exists, cache_path):
                nonlocal loop
                if loop is None:
                    loop = get_event_loop()
                fut = loop.create_future()
                # we intentionally mix up the order to break up heavy load block ranges
                queue.put_nowait((random(), fut, cache_path, args, kwargs))
                if not workers:
                    workers.extend(create_task(cache_deco_worker_coro(fn)) for _ in range(100))
                try:
                    return await fut
                except EOFError:
                    # Truncated/corrupt cache file: fall through and recompute.
                    pass

            async_result: T = await fn(*args, **kwargs)
            try:
                await __cache_write(cache_path, async_result, write_executor)
            except OSError as e:
                # I was having some weird issues in docker that I don't want to debug,
                # so I'm going to assume you have another means to let you know you're
                # out of disk space and will pass right on through here so my script
                # can continue
                if e.strerror != "No space left on device":
                    raise
            return async_result

    else:

        @functools.wraps(fn)
        def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            try:
                with open(cache_path, "rb") as f:
                    return load(f)
            except (FileNotFoundError, EOFError):
                # Cache miss or truncated file: recompute below.
                pass

            sync_result: T = fn(*args, **kwargs)  # type: ignore [assignment, return-value]
            try:
                # Fire-and-forget write via the running event loop, if there is one.
                create_task(
                    coro=__cache_write(cache_path, sync_result, write_executor),
                    skip_gc_until_done=True,
                )
            except RuntimeError:
                # No running event loop; skip caching rather than fail the call.
                pass
            return sync_result

    return disk_cache_wrap
115
+
116
+
117
async def __cache_write(cache_path: str, result: Any, executor: Executor) -> None:
    # Pickle ``result`` and write it to ``cache_path``; the file I/O runs on
    # ``executor`` so the event loop is never blocked.
    async with _aio_open(cache_path, "wb", executor=executor) as f:
        await f.write(dumps(result))
@@ -0,0 +1,4 @@
1
import os
from typing import Final

# Number of blocks to treat as not-yet-final, guarding against chain reorgs.
# Override via the REORG_BUFFER environment variable (default: 30).
REORG_BUFFER: Final = int(os.getenv("REORG_BUFFER", "30"))
File without changes
@@ -0,0 +1,147 @@
1
+ from asyncio import iscoroutinefunction
2
+ from asyncio import sleep as aio_sleep
3
+ from collections.abc import Callable
4
+ from functools import wraps
5
+ from logging import DEBUG, getLogger
6
+ from random import random
7
+ from time import sleep as time_sleep
8
+ from typing import Final, TypeVar
9
+
10
+ from a_sync._typing import AnyFn
11
+ from pony.orm import OperationalError, TransactionError
12
+ from typing_extensions import ParamSpec
13
+
14
P = ParamSpec("P")  # parameter spec of the wrapped callable
T = TypeVar("T")  # return type of the wrapped callable


logger: Final = getLogger(__name__)
# Bound-method aliases hoisted to module level so the retry loops below avoid
# repeated attribute lookups. NOTE: ``logger._log`` is a private logging API;
# it skips the level check, which callers below perform manually via
# ``__logger_is_enabled_for``.
__logger_is_enabled_for: Final = logger.isEnabledFor
__logger_warning: Final = logger.warning
__logger_log: Final = logger._log
22
+
23
+
24
def break_locks(fn: AnyFn[P, T]) -> AnyFn[P, T]:
    """Retry ``fn`` forever whenever the database reports ``database is locked``.

    Any other :class:`~pony.orm.OperationalError` propagates unchanged. Each
    retry sleeps for a randomized interval that grows with the attempt count,
    and once more than five attempts have failed, a warning is logged on every
    subsequent failure until the call finally succeeds.

    Args:
        fn: The function to be wrapped, which may be a coroutine or a regular function.

    Examples:
        Basic usage with a regular function:

        >>> @break_locks
        ... def my_function():
        ...     # Function logic that may encounter a database lock
        ...     pass

        Basic usage with an asynchronous function:

        >>> @break_locks
        ... async def my_async_function():
        ...     # Async function logic that may encounter a database lock
        ...     pass

    See Also:
        - :func:`pony.orm.db_session`: For managing database sessions.
    """
    if iscoroutinefunction(fn):

        @wraps(fn)
        async def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            debug_enabled = None
            attempt = 0
            while True:
                try:
                    return await fn(*args, **kwargs)
                except OperationalError as e:
                    if str(e) != "database is locked":
                        raise e

                    # Resolve (and cache) the DEBUG-enabled check lazily so the
                    # happy path never touches the logging machinery.
                    if debug_enabled is None:
                        debug_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    # Randomized backoff that grows with the attempt count.
                    await aio_sleep(attempt * random())
                    attempt += 1
                    if attempt > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    else:

        @wraps(fn)
        def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            debug_enabled = None
            attempt = 0
            while True:
                try:
                    return fn(*args, **kwargs)  # type: ignore [return-value]
                except OperationalError as e:
                    if str(e) != "database is locked":
                        raise e

                    # Resolve (and cache) the DEBUG-enabled check lazily so the
                    # happy path never touches the logging machinery.
                    if debug_enabled is None:
                        debug_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    # Randomized backoff that grows with the attempt count.
                    time_sleep(attempt * random())
                    attempt += 1
                    if attempt > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    return break_locks_wrap
104
+
105
+
106
def requery_objs_on_diff_tx_err(fn: Callable[P, T]) -> Callable[P, T]:
    """Retry ``fn`` whenever Pony complains about mixing objects across transactions.

    The wrapped function is re-invoked until it either succeeds or raises a
    :class:`~pony.orm.TransactionError` with any other message (which is
    re-raised unchanged).

    Args:
        fn: The function to be wrapped, which must not be a coroutine.

    Raises:
        TypeError: If the function is a coroutine.

    Examples:
        Basic usage with a function that may encounter transaction errors:

        >>> @requery_objs_on_diff_tx_err
        ... def my_function():
        ...     # Function logic that may encounter a transaction error
        ...     pass

    See Also:
        - :func:`pony.orm.db_session`: For managing database sessions.
    """
    if iscoroutinefunction(fn):
        raise TypeError(f"{fn} must not be async")

    @wraps(fn)
    def requery_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
        while True:
            try:
                return fn(*args, **kwargs)
            except TransactionError as e:
                if str(e) == "An attempt to mix objects belonging to different transactions":
                    # The error occurs if you committed new objs to the db and started a
                    # new transaction while still inside of a `db_session`, and then tried
                    # to use the newly committed objects in the next transaction. Now that
                    # the objects are in the db this will not reoccur; the next iteration
                    # will be successful.
                    continue
                raise e

    return requery_wrap
@@ -0,0 +1,311 @@
1
+ from functools import cached_property
2
+ from typing import TYPE_CHECKING
3
+
4
+ from evmspec.structs.transaction import AccessListEntry
5
+ from hexbytes import HexBytes
6
+ from msgspec import json
7
+ from pony.orm import Optional, PrimaryKey, Required, Set, composite_key
8
+ from y._db.entities import Address, Block, Contract, DbEntity, Token
9
+
10
+ from eth_portfolio import structs
11
+ from eth_portfolio._decimal import Decimal
12
+
13
+
14
class BlockExtended(Block):
    """
    Extends the base Block entity to add relationships to transactions, internal transfers, and token transfers.
    """

    if TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions: Set["Transaction"]
        internal_transfers: Set["InternalTransfer"]
        token_transfers: Set["TokenTransfer"]

    # Pony resolves the string entity names below lazily, which is why the
    # forward references work without importing the classes first.
    transactions = Set("Transaction", lazy=True, reverse="block")
    """All transactions included in this block."""

    internal_transfers = Set("InternalTransfer", lazy=True, reverse="block")
    """All internal transfers in this block."""

    token_transfers = Set("TokenTransfer", lazy=True, reverse="block")
    """All token transfers in this block."""
33
+
34
+
35
class AddressExtended(Address):
    """
    Extends the base Address entity to add relationships for sent/received transactions, internal transfers, and token transfers.
    """

    if TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions_sent: Set["Transaction"]
        transactions_received: Set["Transaction"]
        internal_transfers_sent: Set["InternalTransfer"]
        internal_transfers_received: Set["InternalTransfer"]
        token_transfers_sent: Set["TokenTransfer"]
        token_transfers_received: Set["TokenTransfer"]

    # Each Set below is the reverse side of a Required/Optional attribute on the
    # corresponding entity (matched by the `reverse=` name).
    transactions_sent = Set("Transaction", lazy=True, reverse="from_address")
    """Transactions sent from this address."""

    transactions_received = Set("Transaction", lazy=True, reverse="to_address")
    """Transactions received by this address."""

    internal_transfers_sent = Set("InternalTransfer", lazy=True, reverse="from_address")
    """Internal transfers sent from this address."""

    internal_transfers_received = Set("InternalTransfer", lazy=True, reverse="to_address")
    """Internal transfers received by this address."""

    token_transfers_sent = Set("TokenTransfer", lazy=True, reverse="from_address")
    """Token transfers sent from this address."""

    token_transfers_received = Set("TokenTransfer", lazy=True, reverse="to_address")
    """Token transfers received by this address."""
66
+
67
+
68
class ContractExtended(Contract, AddressExtended):
    """
    Extends both Contract and AddressExtended to represent a contract with address relationships.
    """
72
+
73
+
74
class TokenExtended(Token, AddressExtended):
    """
    Extends both Token and AddressExtended to represent a token contract with address relationships.
    """

    if TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transfers: Set["TokenTransfer"]

    # Reverse side of TokenTransfer.token.
    transfers = Set("TokenTransfer", lazy=True, reverse="token")
    """All token transfers involving this token."""
85
+
86
+
87
class Transaction(DbEntity):
    """
    Represents an Ethereum transaction, including all on-chain and decoded fields.

    Scalar columns hold the commonly queried fields; the full struct is stored
    msgspec-encoded in :attr:`raw` and decoded on demand via :attr:`decoded`,
    which backs the read-only properties below.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the transaction."""

    block = Required(BlockExtended, lazy=True, reverse="transactions")
    """The block containing this transaction."""

    transaction_index = Required(int, lazy=True)
    """The index of this transaction within the block."""

    hash = Required(str, index=True, lazy=True)
    """The transaction hash."""

    from_address = Required(AddressExtended, index=True, lazy=True, reverse="transactions_sent")
    """Sender address."""

    to_address = Optional(AddressExtended, index=True, lazy=True, reverse="transactions_received")
    """Recipient address (None for contract creation)."""

    # Decimal(38, 18): 38 digits of precision with 18 decimal places.
    # `value` is the native-asset amount transferred; `price` is presumably the
    # native asset's unit price used to derive `value_usd` — TODO confirm units.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transaction (optional)."""

    nonce = Required(int, lazy=True)
    """Sender's transaction count at the time of this transaction."""

    type = Optional(int, lazy=True)
    """Transaction type (e.g., legacy, EIP-1559)."""

    gas = Required(Decimal, 38, 1, lazy=True)
    """Gas limit for the transaction."""

    gas_price = Required(Decimal, 38, 1, lazy=True)
    """Gas price (in wei)."""

    max_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)
    """Max fee per gas (EIP-1559, optional)."""

    max_priority_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)
    """Max priority fee per gas (EIP-1559, optional)."""

    # A block can contain each transaction index at most once.
    composite_key(block, transaction_index)

    # msgspec JSON encoding of the full structs.Transaction; see `decoded`.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.Transaction:
        # Decode the raw payload once and memoize it on the instance.
        return json.decode(self.raw, type=structs.Transaction)

    @property
    def input(self) -> HexBytes:
        """The data payload sent with the transaction (for contract calls)."""
        return self.decoded.input

    @property
    def r(self) -> HexBytes:
        """ECDSA signature R value."""
        return self.decoded.r

    @property
    def s(self) -> HexBytes:
        """ECDSA signature S value."""
        return self.decoded.s

    @property
    def v(self) -> int:
        """ECDSA signature V value (replay protection)."""
        return self.decoded.v

    @property
    def access_list(self) -> list[AccessListEntry]:
        """EIP-2930 access list (if present)."""
        return self.decoded.access_list

    @property
    def y_parity(self) -> int | None:
        """EIP-1559 y-parity value (if present)."""
        return self.decoded.y_parity
170
+
171
+
172
class InternalTransfer(DbEntity):
    """
    Represents an internal transfer (call, delegatecall, etc.) within a transaction.

    Scalar columns hold the commonly queried fields; the full struct is stored
    msgspec-encoded in :attr:`raw` and decoded on demand via :attr:`decoded`,
    which backs the read-only properties below.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the internal transfer."""

    # common
    block = Required(BlockExtended, lazy=True, reverse="internal_transfers")
    """The block containing this internal transfer."""

    transaction_index = Required(int, lazy=True)
    """The index of the transaction within the block."""

    hash = Required(str, lazy=True)
    """Transaction hash."""

    from_address = Required(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_sent"
    )
    """Sender address."""

    to_address = Optional(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_received"
    )
    """Recipient address (None for contract creation)."""

    # Decimal(38, 18): 38 digits of precision with 18 decimal places.
    # `price` is presumably the native asset's unit price used to derive
    # `value_usd` — TODO confirm units.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transfer (optional)."""

    # unique
    type = Required(str, lazy=True)
    """Type of call (e.g., "call", "delegatecall", "staticcall")."""

    call_type = Required(str, lazy=True)
    """Call type (e.g., "call", "create")."""

    trace_address = Required(str, lazy=True)
    """Path of sub-calls to reach this transfer."""

    gas = Required(Decimal, 38, 1, lazy=True)
    """Gas provided for the call."""

    gas_used = Optional(Decimal, 38, 1, lazy=True)
    """Gas used by the call (optional)."""

    # There is no single natural key for an internal transfer, so uniqueness is
    # enforced across the full combination of identifying columns.
    composite_key(
        block,
        transaction_index,
        hash,
        from_address,
        to_address,
        value,
        type,
        call_type,
        trace_address,
        gas,
        gas_used,
    )

    # msgspec JSON encoding of the full structs.InternalTransfer; see `decoded`.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.InternalTransfer:
        """The decoded :class:`~eth_portfolio.structs.InternalTransfer`, deserialized from :attr:`raw` and memoized."""
        # FIX: removed a no-op bare expression statement
        # (`structs.InternalTransfer.__doc__`) that evaluated the class
        # docstring and discarded it on every access.
        return json.decode(self.raw, type=structs.InternalTransfer)

    @property
    def code(self) -> HexBytes:
        """The code field of the decoded internal transfer."""
        # FIX: removed a no-op bare expression statement
        # (`structs.InternalTransfer.code.__doc__`) that had no runtime effect.
        return self.decoded.code

    @property
    def input(self) -> HexBytes:
        """The input data for the call."""
        return self.decoded.input

    @property
    def output(self) -> HexBytes:
        """The output data from the call."""
        return self.decoded.output

    @property
    def subtraces(self) -> int:
        """The number of sub-operations spawned by this internal transfer."""
        return self.decoded.subtraces
260
+ return self.decoded.subtraces
261
+
262
+
263
class TokenTransfer(DbEntity):
    """
    Represents an ERC20/ERC721 token transfer event within a transaction.

    Scalar columns hold the commonly queried fields; the full struct is stored
    msgspec-encoded in :attr:`raw` and decoded on demand via :attr:`decoded`.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the token transfer."""

    # common
    block = Required(BlockExtended, lazy=True, reverse="token_transfers")
    """The block containing this token transfer."""

    transaction_index = Required(int, lazy=True)
    """The index of the transaction within the block."""

    hash = Required(str, lazy=True)
    """Transaction hash."""

    from_address = Required(AddressExtended, index=True, lazy=True, reverse="token_transfers_sent")
    """Sender address."""

    to_address = Required(
        AddressExtended, index=True, lazy=True, reverse="token_transfers_received"
    )
    """Recipient address."""

    value = Required(Decimal, 38, 18, lazy=True)
    """Amount of tokens transferred."""

    price = Optional(Decimal, 38, 18, lazy=True)
    """Price of the token at the time of transfer (optional)."""

    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transfer (optional)."""

    # unique
    log_index = Required(int, lazy=True)
    """Log index of the transfer event within the transaction."""

    token = Optional(TokenExtended, index=True, lazy=True, reverse="transfers")
    """The token contract involved in this transfer."""

    # (block, transaction index, log index) uniquely identifies an event log.
    composite_key(block, transaction_index, log_index)

    # msgspec JSON encoding of the full structs.TokenTransfer; see `decoded`.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.TokenTransfer:
        # Decode the raw payload once and memoize it on the instance.
        return json.decode(self.raw, type=structs.TokenTransfer)