eth-portfolio-temp 0.3.0__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eth-portfolio-temp might be problematic. Click here for more details.

Files changed (83)
  1. eth_portfolio/__init__.py +25 -0
  2. eth_portfolio/_argspec.cp313-win_amd64.pyd +0 -0
  3. eth_portfolio/_argspec.py +42 -0
  4. eth_portfolio/_cache.py +121 -0
  5. eth_portfolio/_config.cp313-win_amd64.pyd +0 -0
  6. eth_portfolio/_config.py +4 -0
  7. eth_portfolio/_db/__init__.py +0 -0
  8. eth_portfolio/_db/decorators.py +147 -0
  9. eth_portfolio/_db/entities.py +311 -0
  10. eth_portfolio/_db/utils.py +604 -0
  11. eth_portfolio/_decimal.py +156 -0
  12. eth_portfolio/_decorators.py +84 -0
  13. eth_portfolio/_exceptions.py +67 -0
  14. eth_portfolio/_ledgers/__init__.py +0 -0
  15. eth_portfolio/_ledgers/address.py +938 -0
  16. eth_portfolio/_ledgers/portfolio.py +327 -0
  17. eth_portfolio/_loaders/__init__.py +33 -0
  18. eth_portfolio/_loaders/_nonce.cp313-win_amd64.pyd +0 -0
  19. eth_portfolio/_loaders/_nonce.py +196 -0
  20. eth_portfolio/_loaders/balances.cp313-win_amd64.pyd +0 -0
  21. eth_portfolio/_loaders/balances.py +94 -0
  22. eth_portfolio/_loaders/token_transfer.py +217 -0
  23. eth_portfolio/_loaders/transaction.py +240 -0
  24. eth_portfolio/_loaders/utils.cp313-win_amd64.pyd +0 -0
  25. eth_portfolio/_loaders/utils.py +68 -0
  26. eth_portfolio/_shitcoins.cp313-win_amd64.pyd +0 -0
  27. eth_portfolio/_shitcoins.py +330 -0
  28. eth_portfolio/_stableish.cp313-win_amd64.pyd +0 -0
  29. eth_portfolio/_stableish.py +42 -0
  30. eth_portfolio/_submodules.py +73 -0
  31. eth_portfolio/_utils.py +225 -0
  32. eth_portfolio/_ydb/__init__.py +0 -0
  33. eth_portfolio/_ydb/token_transfers.py +145 -0
  34. eth_portfolio/address.py +397 -0
  35. eth_portfolio/buckets.py +212 -0
  36. eth_portfolio/constants.cp313-win_amd64.pyd +0 -0
  37. eth_portfolio/constants.py +82 -0
  38. eth_portfolio/portfolio.py +661 -0
  39. eth_portfolio/protocols/__init__.py +67 -0
  40. eth_portfolio/protocols/_base.py +108 -0
  41. eth_portfolio/protocols/convex.py +17 -0
  42. eth_portfolio/protocols/dsr.py +51 -0
  43. eth_portfolio/protocols/lending/README.md +6 -0
  44. eth_portfolio/protocols/lending/__init__.py +50 -0
  45. eth_portfolio/protocols/lending/_base.py +57 -0
  46. eth_portfolio/protocols/lending/compound.py +187 -0
  47. eth_portfolio/protocols/lending/liquity.py +110 -0
  48. eth_portfolio/protocols/lending/maker.py +104 -0
  49. eth_portfolio/protocols/lending/unit.py +46 -0
  50. eth_portfolio/protocols/liquity.py +16 -0
  51. eth_portfolio/py.typed +0 -0
  52. eth_portfolio/structs/__init__.py +43 -0
  53. eth_portfolio/structs/modified.py +69 -0
  54. eth_portfolio/structs/structs.py +637 -0
  55. eth_portfolio/typing/__init__.py +1447 -0
  56. eth_portfolio/typing/balance/single.py +176 -0
  57. eth_portfolio__mypyc.cp313-win_amd64.pyd +0 -0
  58. eth_portfolio_scripts/__init__.py +20 -0
  59. eth_portfolio_scripts/_args.py +26 -0
  60. eth_portfolio_scripts/_logging.py +15 -0
  61. eth_portfolio_scripts/_portfolio.py +209 -0
  62. eth_portfolio_scripts/_utils.py +106 -0
  63. eth_portfolio_scripts/balances.cp313-win_amd64.pyd +0 -0
  64. eth_portfolio_scripts/balances.py +52 -0
  65. eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
  66. eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
  67. eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
  68. eth_portfolio_scripts/docker/__init__.cp313-win_amd64.pyd +0 -0
  69. eth_portfolio_scripts/docker/__init__.py +16 -0
  70. eth_portfolio_scripts/docker/check.cp313-win_amd64.pyd +0 -0
  71. eth_portfolio_scripts/docker/check.py +67 -0
  72. eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
  73. eth_portfolio_scripts/docker/docker_compose.cp313-win_amd64.pyd +0 -0
  74. eth_portfolio_scripts/docker/docker_compose.py +98 -0
  75. eth_portfolio_scripts/main.py +119 -0
  76. eth_portfolio_scripts/py.typed +1 -0
  77. eth_portfolio_scripts/victoria/__init__.py +73 -0
  78. eth_portfolio_scripts/victoria/types.py +38 -0
  79. eth_portfolio_temp-0.3.0.dist-info/METADATA +26 -0
  80. eth_portfolio_temp-0.3.0.dist-info/RECORD +83 -0
  81. eth_portfolio_temp-0.3.0.dist-info/WHEEL +5 -0
  82. eth_portfolio_temp-0.3.0.dist-info/entry_points.txt +2 -0
  83. eth_portfolio_temp-0.3.0.dist-info/top_level.txt +3 -0
@@ -0,0 +1,25 @@
1
import warnings

# Silence a noisy warning about decimal-precision changes; this package
# adjusts precision on purpose (see eth_portfolio._decimal).
warnings.filterwarnings(
    "ignore",
    message="Changing decimals precision could have unintended side effects!",
    category=UserWarning,
)


import a_sync._smart

# NOTE(review): presumably installs a_sync's "smart task" factory so tasks
# created by this package get smart-task semantics -- confirm against a_sync docs.
a_sync._smart.set_smart_task_factory()


from eth_portfolio.portfolio import Portfolio, portfolio

# make sure we init the extended db before we init ypm somewhere
from eth_portfolio._db import utils
from eth_portfolio._shitcoins import SHITCOINS

# Public API of the package.
__all__ = [
    "Portfolio",
    "portfolio",
    "SHITCOINS",
]
@@ -0,0 +1,42 @@
1
+ from inspect import getfullargspec
2
+ from typing import Any, Callable, List, Tuple, Type
3
+
4
+ # WIP:
5
+
6
+
7
def get_args_type(sample: Callable) -> Tuple[Type, ...]:
    """Return the annotated types of ``sample``'s positional arguments.

    Unannotated arguments map to :obj:`typing.Any`; a ``self`` argument is
    skipped so the helper works on methods as well as plain functions.

    Args:
        sample: The callable to inspect.

    Returns:
        A tuple of one type per (non-``self``) positional argument.
    """
    argspec = getfullargspec(sample)
    # BUG FIX: the original returned `tuple(*args.values())`, which unpacks the
    # annotation values as separate positional arguments to tuple() and raises
    # TypeError for any function that doesn't have exactly one argument.
    return tuple(
        argspec.annotations.get(arg_name, Any)
        for arg_name in argspec.args
        if arg_name != "self"
    )
15
+
16
+
17
def get_kwargs_type(sample: Callable) -> Tuple[Type, ...]:
    """Return a ``Tuple[...]`` type describing ``sample``'s defaulted arguments.

    Arguments without annotations map to :obj:`typing.Any`. For zero or more
    than three defaulted arguments, :obj:`typing.Any` is returned (matching the
    original fallback branch).

    Args:
        sample: The callable to inspect.

    Returns:
        ``Tuple[T1, ...]`` parameterized with the defaulted-argument types,
        or ``Any`` when there are 0 or >3 of them.
    """
    argspec = getfullargspec(sample)
    # Args without defaults come first; the trailing `len(defaults)` args are
    # the keyword-style (defaulted) ones.
    num_positional = len(argspec.args) - len(argspec.defaults or [])
    kwarg_names = argspec.args[num_positional:]
    # BUG FIX: the original called `list(*kwargs.values())` (same unpack bug as
    # in get_args_type) and branched on the count of *non-default* args, which
    # could raise IndexError or pick the wrong branch entirely.
    _kwarg_types: List[Type] = [argspec.annotations.get(name, Any) for name in kwarg_names]
    num_kwargs = len(_kwarg_types)
    if num_kwargs == 1:
        return Tuple[_kwarg_types[0]]  # type: ignore [valid-type,return-value]
    elif num_kwargs == 2:
        return Tuple[_kwarg_types[0], _kwarg_types[1]]  # type: ignore [misc,return-value]
    elif num_kwargs == 3:
        return Tuple[_kwarg_types[0], _kwarg_types[1], _kwarg_types[2]]  # type: ignore [misc,return-value]
    else:
        return Any  # type: ignore [misc,return-value]
34
+
35
+
36
def get_return_type(sample: Callable) -> Type:
    """Return ``sample``'s annotated return type, or ``Any`` if unannotated."""
    annotations = getfullargspec(sample).annotations
    return annotations.get("return", Any)
39
+
40
+
41
def get_types(sample: Callable) -> Tuple[Type, Type, Type]:
    """Return the (args, kwargs, return) type triple for ``sample``."""
    args_type = get_args_type(sample)
    kwargs_type = get_kwargs_type(sample)
    return_type = get_return_type(sample)
    return args_type, kwargs_type, return_type  # type: ignore [return-value]
@@ -0,0 +1,121 @@
1
+ import functools
2
+ import inspect
3
+ from asyncio import AbstractEventLoop, PriorityQueue, Task, current_task, get_event_loop
4
+ from concurrent.futures import Executor
5
+ from hashlib import md5
6
+ from logging import getLogger
7
+ from os import makedirs
8
+ from os.path import exists, join
9
+ from pickle import dumps, load, loads
10
+ from random import random
11
+ from typing import Any, Callable, Final, List, NoReturn, Optional
12
+
13
+ from a_sync import PruningThreadPoolExecutor
14
+ from a_sync._typing import P, T
15
+ from a_sync.asyncio import create_task
16
+
17
+ # TODO: rip out this deprecated func
18
+ from a_sync.primitives.queue import log_broken
19
+ from aiofiles import open as _aio_open
20
+ from brownie import chain
21
+
22
# Per-chain on-disk cache root, e.g. "./cache/1/" when connected to mainnet.
BASE_PATH: Final = f"./cache/{chain.id}/"
_THREAD_NAME_PREFIX: Final = "eth-portfolio-cache-decorator"
# Shared pool used only for `os.path.exists` checks across all cached functions.
_EXISTS_EXECUTOR: Final = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-exists")
25
+
26
+
27
def cache_to_disk(fn: Callable[P, T]) -> Callable[P, T]:
    # sourcery skip: use-contextlib-suppress
    """Decorator that caches ``fn``'s pickled results on disk, keyed by call args.

    Works for both sync and async callables. Results are pickled to
    ``BASE_PATH/<module path>/<name>/<md5 of args>.json``. For async functions,
    cache reads are funneled through a pool of 100 worker tasks feeding off a
    priority queue; writes always go through a per-function thread pool.
    """
    name = fn.__name__
    cache_path_for_fn = f"{BASE_PATH}{fn.__module__.replace('.', '/')}/{name}"
    logger = getLogger(f"eth_portfolio.cache_to_disk.{name}")

    def get_cache_file_path(args: tuple[Any, ...], kwargs: dict[str, Any]) -> str:
        # Create a unique filename based on the function arguments
        # (kwargs are sorted so keyword order doesn't change the key).
        key = md5(dumps((args, sorted(kwargs.items())))).hexdigest()
        return join(cache_path_for_fn, f"{key}.json")

    write_executor = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-write")

    makedirs(cache_path_for_fn, exist_ok=True)

    if inspect.iscoroutinefunction(fn):
        read_executor = PruningThreadPoolExecutor(
            8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-read"
        )

        # Queue entries are (random priority, future, cache_path, args, kwargs).
        queue: PriorityQueue = PriorityQueue()

        async def cache_deco_worker_coro(func: Callable[..., Any]) -> NoReturn:
            # Long-lived worker: reads cache files off the queue and resolves
            # the associated futures. Only exits by raising.
            try:
                while True:
                    _, fut, cache_path, args, kwargs = await queue.get()
                    try:
                        async with _aio_open(cache_path, "rb", executor=read_executor) as f:
                            fut.set_result(loads(await f.read()))
                    except Exception as e:
                        # Deliver the failure to the waiter instead of dying.
                        fut.set_exception(e)
            except Exception as e:
                logger.error("%s for %s is broken!!!", current_task(), func)
                logger.exception(e)
                raise

        # Lazily captured loop and lazily spawned worker tasks (created on the
        # first cache hit so import time stays loop-free).
        loop: Optional[AbstractEventLoop] = None
        workers: List[Task[NoReturn]] = []

        @functools.wraps(fn)
        async def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            if await _EXISTS_EXECUTOR.run(exists, cache_path):
                nonlocal loop
                if loop is None:
                    loop = get_event_loop()
                fut = loop.create_future()
                # we intentionally mix up the order to break up heavy load block ranges
                queue.put_nowait((random(), fut, cache_path, args, kwargs))
                if not workers:
                    workers.extend(create_task(cache_deco_worker_coro(fn)) for _ in range(100))
                try:
                    return await fut
                except EOFError:
                    # Truncated/corrupt cache file: fall through and recompute.
                    pass

            async_result: T = await fn(*args, **kwargs)
            try:
                await __cache_write(cache_path, async_result, write_executor)
            except OSError as e:
                # I was having some weird issues in docker that I don't want to debug,
                # so I'm going to assume you have another means to let you know you're
                # out of disk space and will pass right on through here so my script
                # can continue
                if e.strerror != "No space left on device":
                    raise
            return async_result

    else:

        @functools.wraps(fn)
        def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            try:
                with open(cache_path, "rb") as f:
                    return load(f)
            except (FileNotFoundError, EOFError):
                # Cache miss or corrupt file: recompute below.
                pass

            sync_result: T = fn(*args, **kwargs)  # type: ignore [assignment, return-value]
            try:
                # Fire-and-forget write; requires a running event loop.
                create_task(
                    coro=__cache_write(cache_path, sync_result, write_executor),
                    skip_gc_until_done=True,
                )
            except RuntimeError:
                # No running loop -- skip caching rather than fail the call.
                pass
            return sync_result

    return disk_cache_wrap
117
+
118
+
119
async def __cache_write(cache_path: str, result: Any, executor: Executor) -> None:
    """Pickle ``result`` and write it to ``cache_path``, doing file I/O on ``executor``."""
    async with _aio_open(cache_path, "wb", executor=executor) as f:
        await f.write(dumps(result))
@@ -0,0 +1,4 @@
1
import os
from typing import Final

# Configurable via the REORG_BUFFER environment variable; defaults to 30.
# NOTE(review): presumably the number of trailing blocks treated as unsafe due
# to potential chain reorganizations -- confirm against the callers.
REORG_BUFFER: Final = int(os.getenv("REORG_BUFFER", 30))
File without changes
@@ -0,0 +1,147 @@
1
+ from asyncio import iscoroutinefunction
2
+ from asyncio import sleep as aio_sleep
3
+ from functools import wraps
4
+ from logging import DEBUG, getLogger
5
+ from random import random
6
+ from time import sleep as time_sleep
7
+ from typing import Callable, Final, TypeVar
8
+
9
+ from a_sync._typing import AnyFn
10
+ from pony.orm import OperationalError, TransactionError
11
+ from typing_extensions import ParamSpec
12
+
13
+
14
# Shared type variables for the decorators below.
P = ParamSpec("P")
T = TypeVar("T")


logger: Final = getLogger(__name__)
# Bound-method aliases hoisted to module level so the retry loops below avoid
# repeated attribute lookups. NOTE(review): `logger._log` is a private stdlib
# logging API -- verify it remains compatible with the targeted Python versions.
__logger_is_enabled_for: Final = logger.isEnabledFor
__logger_warning: Final = logger.warning
__logger_log: Final = logger._log
22
+
23
+
24
def break_locks(fn: AnyFn[P, T]) -> AnyFn[P, T]:
    """
    Decorator to handle database lock errors by retrying the function.

    This decorator is designed to wrap functions that interact with a database
    and may encounter `OperationalError` due to database locks. It will retry
    the function indefinitely if a "database is locked" error occurs. After
    5 attempts, a warning is logged, but the function will continue to retry
    until it succeeds or a non-lock-related error occurs.

    Args:
        fn: The function to be wrapped, which may be a coroutine or a regular function.

    Examples:
        Basic usage with a regular function:

        >>> @break_locks
        ... def my_function():
        ...     # Function logic that may encounter a database lock
        ...     pass

        Basic usage with an asynchronous function:

        >>> @break_locks
        ... async def my_async_function():
        ...     # Async function logic that may encounter a database lock
        ...     pass

    See Also:
        - :func:`pony.orm.db_session`: For managing database sessions.
    """
    if iscoroutinefunction(fn):

        @wraps(fn)
        async def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            # Cache the isEnabledFor(DEBUG) result on first failure so the hot
            # retry loop doesn't re-check logger config each iteration.
            debug_logs_enabled = None
            tries = 0
            while True:
                try:
                    return await fn(*args, **kwargs)
                except OperationalError as e:
                    # Only swallow lock contention; any other DB error bubbles up.
                    if str(e) != "database is locked":
                        raise e

                    if debug_logs_enabled is None:
                        debug_logs_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_logs_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    # Linear backoff with jitter; the first retry sleeps 0.
                    await aio_sleep(tries * random())
                    tries += 1
                    if tries > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    else:

        @wraps(fn)
        def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            # Synchronous twin of the coroutine wrapper above; same retry logic
            # with a blocking sleep instead of an awaited one.
            debug_logs_enabled = None
            tries = 0
            while True:
                try:
                    return fn(*args, **kwargs)  # type: ignore [return-value]
                except OperationalError as e:
                    if str(e) != "database is locked":
                        raise e

                    if debug_logs_enabled is None:
                        debug_logs_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_logs_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    time_sleep(tries * random())
                    tries += 1
                    if tries > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    return break_locks_wrap
104
+
105
+
106
+ def requery_objs_on_diff_tx_err(fn: Callable[P, T]) -> Callable[P, T]:
107
+ """
108
+ Decorator to handle transaction errors by retrying the function.
109
+
110
+ This decorator is designed to wrap functions that may encounter
111
+ `TransactionError` due to mixing objects from different transactions.
112
+ It will retry the function until it succeeds or a non-transaction-related
113
+ error occurs.
114
+
115
+ Args:
116
+ fn: The function to be wrapped, which must not be a coroutine.
117
+
118
+ Raises:
119
+ TypeError: If the function is a coroutine.
120
+
121
+ Examples:
122
+ Basic usage with a function that may encounter transaction errors:
123
+
124
+ >>> @requery_objs_on_diff_tx_err
125
+ ... def my_function():
126
+ ... # Function logic that may encounter a transaction error
127
+ ... pass
128
+
129
+ See Also:
130
+ - :func:`pony.orm.db_session`: For managing database sessions.
131
+ """
132
+ if iscoroutinefunction(fn):
133
+ raise TypeError(f"{fn} must not be async")
134
+
135
+ @wraps(fn)
136
+ def requery_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
137
+ while True:
138
+ try:
139
+ return fn(*args, **kwargs)
140
+ except TransactionError as e:
141
+ if str(e) != "An attempt to mix objects belonging to different transactions":
142
+ raise e
143
+ # The error occurs if you committed new objs to the db and started a new transaction while still inside of a `db_session`,
144
+ # and then tried to use the newly committed objects in the next transaction. Now that the objects are in the db this will
145
+ # not reoccur. The next iteration will be successful.
146
+
147
+ return requery_wrap
@@ -0,0 +1,311 @@
1
+ import typing
2
+ from functools import cached_property
3
+
4
+ from evmspec.structs.transaction import AccessListEntry
5
+ from hexbytes import HexBytes
6
+ from msgspec import json
7
+ from pony.orm import Optional, PrimaryKey, Required, Set, composite_key
8
+ from y._db.entities import Address, Block, Contract, DbEntity, Token
9
+
10
+ from eth_portfolio import structs
11
+ from eth_portfolio._decimal import Decimal
12
+
13
+
14
class BlockExtended(Block):
    """
    Extends the base Block entity to add relationships to transactions, internal transfers, and token transfers.
    """

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions: Set["Transaction"]
        internal_transfers: Set["InternalTransfer"]
        token_transfers: Set["TokenTransfer"]

    # Pony collection attributes; `reverse` names the matching attribute on the
    # related entity and `lazy=True` defers loading until accessed.
    transactions = Set("Transaction", lazy=True, reverse="block")
    """All transactions included in this block."""

    internal_transfers = Set("InternalTransfer", lazy=True, reverse="block")
    """All internal transfers in this block."""

    token_transfers = Set("TokenTransfer", lazy=True, reverse="block")
    """All token transfers in this block."""
33
+
34
+
35
class AddressExtended(Address):
    """
    Extends the base Address entity to add relationships for sent/received transactions, internal transfers, and token transfers.
    """

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions_sent: Set["Transaction"]
        transactions_received: Set["Transaction"]
        internal_transfers_sent: Set["InternalTransfer"]
        internal_transfers_received: Set["InternalTransfer"]
        token_transfers_sent: Set["TokenTransfer"]
        token_transfers_received: Set["TokenTransfer"]

    # Each pair below is the reverse side of the from_address/to_address
    # attributes declared on Transaction/InternalTransfer/TokenTransfer.
    transactions_sent = Set("Transaction", lazy=True, reverse="from_address")
    """Transactions sent from this address."""

    transactions_received = Set("Transaction", lazy=True, reverse="to_address")
    """Transactions received by this address."""

    internal_transfers_sent = Set("InternalTransfer", lazy=True, reverse="from_address")
    """Internal transfers sent from this address."""

    internal_transfers_received = Set("InternalTransfer", lazy=True, reverse="to_address")
    """Internal transfers received by this address."""

    token_transfers_sent = Set("TokenTransfer", lazy=True, reverse="from_address")
    """Token transfers sent from this address."""

    token_transfers_received = Set("TokenTransfer", lazy=True, reverse="to_address")
    """Token transfers received by this address."""
66
+
67
+
68
class ContractExtended(Contract, AddressExtended):
    """
    Extends both Contract and AddressExtended to represent a contract with address relationships.
    """

    # No additional attributes: combines Contract's columns with
    # AddressExtended's transfer/transaction relationships.
72
+
73
+
74
class TokenExtended(Token, AddressExtended):
    """
    Extends both Token and AddressExtended to represent a token contract with address relationships.
    """

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transfers: Set["TokenTransfer"]

    # Reverse side of TokenTransfer.token.
    transfers = Set("TokenTransfer", lazy=True, reverse="token")
    """All token transfers involving this token."""
85
+
86
+
87
class Transaction(DbEntity):
    """
    Represents an Ethereum transaction, including all on-chain and decoded fields.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the transaction."""

    block = Required(BlockExtended, lazy=True, reverse="transactions")
    """The block containing this transaction."""

    transaction_index = Required(int, lazy=True)
    """The index of this transaction within the block."""

    hash = Required(str, index=True, lazy=True)
    """The transaction hash."""

    from_address = Required(AddressExtended, index=True, lazy=True, reverse="transactions_sent")
    """Sender address."""

    to_address = Optional(AddressExtended, index=True, lazy=True, reverse="transactions_received")
    """Recipient address (None for contract creation)."""

    # The (38, 18) / (38, 1) pairs below are column precision/scale arguments
    # for the custom Decimal type.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transaction (optional)."""

    nonce = Required(int, lazy=True)
    """Sender's transaction count at the time of this transaction."""

    type = Optional(int, lazy=True)
    """Transaction type (e.g., legacy, EIP-1559)."""

    gas = Required(Decimal, 38, 1, lazy=True)
    """Gas limit for the transaction."""

    gas_price = Required(Decimal, 38, 1, lazy=True)
    """Gas price (in wei)."""

    max_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)
    """Max fee per gas (EIP-1559, optional)."""

    max_priority_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)
    """Max priority fee per gas (EIP-1559, optional)."""

    # A transaction is uniquely positioned by (block, transaction_index).
    composite_key(block, transaction_index)

    # msgspec json-encoded Transaction struct; rarely-used fields are read
    # lazily from this blob via the properties below instead of extra columns.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.Transaction:
        """Decode (once per instance) the full transaction struct from :attr:`raw`."""
        return json.decode(self.raw, type=structs.Transaction)

    @property
    def input(self) -> HexBytes:
        """The data payload sent with the transaction (for contract calls)."""
        return self.decoded.input

    @property
    def r(self) -> HexBytes:
        """ECDSA signature R value."""
        return self.decoded.r

    @property
    def s(self) -> HexBytes:
        """ECDSA signature S value."""
        return self.decoded.s

    @property
    def v(self) -> int:
        """ECDSA signature V value (replay protection)."""
        return self.decoded.v

    @property
    def access_list(self) -> typing.List[AccessListEntry]:
        """EIP-2930 access list (if present)."""
        return self.decoded.access_list

    @property
    def y_parity(self) -> typing.Optional[int]:
        """EIP-1559 y-parity value (if present)."""
        return self.decoded.y_parity
170
+
171
+
172
class InternalTransfer(DbEntity):
    """
    Represents an internal transfer (call, delegatecall, etc.) within a transaction.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the internal transfer."""

    # common
    block = Required(BlockExtended, lazy=True, reverse="internal_transfers")
    """The block containing this internal transfer."""

    transaction_index = Required(int, lazy=True)
    """The index of the transaction within the block."""

    hash = Required(str, lazy=True)
    """Transaction hash."""

    from_address = Required(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_sent"
    )
    """Sender address."""

    to_address = Optional(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_received"
    )
    """Recipient address (None for contract creation)."""

    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transfer (optional)."""

    # unique
    type = Required(str, lazy=True)
    """Type of call (e.g., "call", "delegatecall", "staticcall")."""

    call_type = Required(str, lazy=True)
    """Call type (e.g., "call", "create")."""

    trace_address = Required(str, lazy=True)
    """Path of sub-calls to reach this transfer."""

    gas = Required(Decimal, 38, 1, lazy=True)
    """Gas provided for the call."""

    gas_used = Optional(Decimal, 38, 1, lazy=True)
    """Gas used by the call (optional)."""

    # Internal transfers have no single natural key, so uniqueness spans
    # nearly every common column.
    composite_key(
        block,
        transaction_index,
        hash,
        from_address,
        to_address,
        value,
        type,
        call_type,
        trace_address,
        gas,
        gas_used,
    )

    # msgspec json-encoded InternalTransfer struct; rarely-used fields are
    # decoded lazily from this blob via the properties below.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.InternalTransfer:
        """Decode (once per instance) the full internal transfer struct from :attr:`raw`."""
        # BUG FIX: removed a stray no-op expression statement
        # (`structs.InternalTransfer.__doc__`) that had no effect.
        return json.decode(self.raw, type=structs.InternalTransfer)

    @property
    def code(self) -> HexBytes:
        """See :attr:`structs.InternalTransfer.code`."""
        # BUG FIX: removed a stray no-op expression statement
        # (`structs.InternalTransfer.code.__doc__`) that had no effect.
        return self.decoded.code

    @property
    def input(self) -> HexBytes:
        """The input data for the call."""
        return self.decoded.input

    @property
    def output(self) -> HexBytes:
        """The output data from the call."""
        return self.decoded.output

    @property
    def subtraces(self) -> int:
        """The number of sub-operations spawned by this internal transfer."""
        return self.decoded.subtraces
261
+
262
+
263
class TokenTransfer(DbEntity):
    """
    Represents an ERC20/ERC721 token transfer event within a transaction.
    """

    _id = PrimaryKey(int, auto=True)
    """Auto-incrementing primary key for the token transfer."""

    # common
    block = Required(BlockExtended, lazy=True, reverse="token_transfers")
    """The block containing this token transfer."""

    transaction_index = Required(int, lazy=True)
    """The index of the transaction within the block."""

    hash = Required(str, lazy=True)
    """Transaction hash."""

    from_address = Required(AddressExtended, index=True, lazy=True, reverse="token_transfers_sent")
    """Sender address."""

    to_address = Required(
        AddressExtended, index=True, lazy=True, reverse="token_transfers_received"
    )
    """Recipient address."""

    value = Required(Decimal, 38, 18, lazy=True)
    """Amount of tokens transferred."""

    price = Optional(Decimal, 38, 18, lazy=True)
    """Price of the token at the time of transfer (optional)."""

    value_usd = Optional(Decimal, 38, 18, lazy=True)
    """USD value of the transfer (optional)."""

    # unique
    log_index = Required(int, lazy=True)
    """Log index of the transfer event within the transaction."""

    # NOTE(review): Optional, presumably because the token row may not exist
    # when the transfer is first persisted -- confirm against the insert path.
    token = Optional(TokenExtended, index=True, lazy=True, reverse="transfers")
    """The token contract involved in this transfer."""

    # A transfer is uniquely positioned by (block, transaction_index, log_index).
    composite_key(block, transaction_index, log_index)

    # msgspec json-encoded TokenTransfer struct, decoded lazily below.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.TokenTransfer:
        """Decode (once per instance) the full token transfer struct from :attr:`raw`."""
        return json.decode(self.raw, type=structs.TokenTransfer)