eth-portfolio 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eth-portfolio might be problematic. Click here for more details.

Files changed (47) hide show
  1. eth_portfolio/__init__.py +16 -0
  2. eth_portfolio/_argspec.py +42 -0
  3. eth_portfolio/_cache.py +116 -0
  4. eth_portfolio/_config.py +3 -0
  5. eth_portfolio/_db/__init__.py +0 -0
  6. eth_portfolio/_db/decorators.py +147 -0
  7. eth_portfolio/_db/entities.py +204 -0
  8. eth_portfolio/_db/utils.py +595 -0
  9. eth_portfolio/_decimal.py +122 -0
  10. eth_portfolio/_decorators.py +71 -0
  11. eth_portfolio/_exceptions.py +67 -0
  12. eth_portfolio/_ledgers/__init__.py +0 -0
  13. eth_portfolio/_ledgers/address.py +892 -0
  14. eth_portfolio/_ledgers/portfolio.py +327 -0
  15. eth_portfolio/_loaders/__init__.py +33 -0
  16. eth_portfolio/_loaders/balances.py +78 -0
  17. eth_portfolio/_loaders/token_transfer.py +214 -0
  18. eth_portfolio/_loaders/transaction.py +379 -0
  19. eth_portfolio/_loaders/utils.py +59 -0
  20. eth_portfolio/_shitcoins.py +212 -0
  21. eth_portfolio/_utils.py +286 -0
  22. eth_portfolio/_ydb/__init__.py +0 -0
  23. eth_portfolio/_ydb/token_transfers.py +136 -0
  24. eth_portfolio/address.py +382 -0
  25. eth_portfolio/buckets.py +181 -0
  26. eth_portfolio/constants.py +58 -0
  27. eth_portfolio/portfolio.py +629 -0
  28. eth_portfolio/protocols/__init__.py +66 -0
  29. eth_portfolio/protocols/_base.py +107 -0
  30. eth_portfolio/protocols/convex.py +17 -0
  31. eth_portfolio/protocols/dsr.py +31 -0
  32. eth_portfolio/protocols/lending/__init__.py +49 -0
  33. eth_portfolio/protocols/lending/_base.py +57 -0
  34. eth_portfolio/protocols/lending/compound.py +185 -0
  35. eth_portfolio/protocols/lending/liquity.py +110 -0
  36. eth_portfolio/protocols/lending/maker.py +105 -0
  37. eth_portfolio/protocols/lending/unit.py +47 -0
  38. eth_portfolio/protocols/liquity.py +16 -0
  39. eth_portfolio/py.typed +0 -0
  40. eth_portfolio/structs/__init__.py +43 -0
  41. eth_portfolio/structs/modified.py +69 -0
  42. eth_portfolio/structs/structs.py +637 -0
  43. eth_portfolio/typing.py +1460 -0
  44. eth_portfolio-1.1.0.dist-info/METADATA +174 -0
  45. eth_portfolio-1.1.0.dist-info/RECORD +47 -0
  46. eth_portfolio-1.1.0.dist-info/WHEEL +5 -0
  47. eth_portfolio-1.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,16 @@
1
# Install a_sync's "smart" task factory before anything else in this package
# creates asyncio tasks, so every task spawned downstream gets it.
import a_sync._smart

a_sync._smart.set_smart_task_factory()


from eth_portfolio.portfolio import Portfolio, portfolio

# make sure we init the extended db before we init ypm somewhere
from eth_portfolio._db import utils
from eth_portfolio._shitcoins import SHITCOINS

# Public API of the package.
__all__ = [
    "Portfolio",
    "portfolio",
    "SHITCOINS",
]
@@ -0,0 +1,42 @@
1
+ from inspect import getfullargspec
2
+ from typing import Any, Callable, List, Tuple, Type
3
+
4
+ # WIP:
5
+
6
+
7
def get_args_type(sample: Callable) -> Tuple[Type, ...]:
    """Return the annotated types of *sample*'s positional parameters.

    ``self`` is skipped; any parameter without an annotation maps to
    :data:`typing.Any`.

    Args:
        sample: The callable to inspect.

    Returns:
        One type per (non-``self``) positional parameter, in declaration
        order.
    """
    argspec = getfullargspec(sample)
    # BUGFIX: the previous implementation called `tuple(*args.values())`,
    # which unpacks the types as separate arguments to `tuple()` — a
    # TypeError for any function with more than one parameter. `tuple()`
    # takes a single iterable.
    return tuple(
        argspec.annotations.get(arg_name, Any)
        for arg_name in argspec.args
        if arg_name != "self"
    )
15
+
16
+
17
def get_kwargs_type(sample: Callable) -> Tuple[Type, ...]:
    """Return a ``Tuple[...]`` type describing *sample*'s defaulted params.

    Parameters that carry a default value are treated as keyword args; any
    without an annotation maps to :data:`typing.Any`.

    Args:
        sample: The callable to inspect.

    Returns:
        ``Tuple[T1, ...]`` parameterized with one type per defaulted
        parameter (up to three), or :data:`typing.Any` for zero or more
        than three.
    """
    argspec = getfullargspec(sample)
    # Params without defaults come first in `argspec.args`; everything from
    # this index onward has a default and is treated as a keyword arg.
    first_kwarg_index = len(argspec.args) - len(argspec.defaults or [])
    kwarg_names = argspec.args[first_kwarg_index:]
    _kwarg_types: List[Type[object]] = [
        argspec.annotations.get(kwarg_name, Any) for kwarg_name in kwarg_names
    ]
    # BUGFIX: the previous implementation branched on the count of
    # *non-default* params and built the list with `list(*kwargs.values())`
    # (same unpacking bug as in `get_args_type`); branch on the actual
    # number of keyword args instead.
    num_kwargs = len(_kwarg_types)
    if num_kwargs == 1:
        return Tuple[_kwarg_types[0]]  # type: ignore [valid-type,return-value]
    elif num_kwargs == 2:
        return Tuple[_kwarg_types[0], _kwarg_types[1]]  # type: ignore [misc,return-value]
    elif num_kwargs == 3:
        return Tuple[_kwarg_types[0], _kwarg_types[1], _kwarg_types[2]]  # type: ignore [misc,return-value]
    else:
        return Any  # type: ignore [misc,return-value]
34
+
35
+
36
def get_return_type(sample: Callable) -> Type:
    """Return *sample*'s annotated return type, or ``Any`` if unannotated.

    Args:
        sample: The callable to inspect.
    """
    annotations = getfullargspec(sample).annotations
    return annotations.get("return", Any)
39
+
40
+
41
def get_types(sample: Callable) -> Tuple[Type, Type, Type]:
    """Return ``(args_type, kwargs_type, return_type)`` for *sample*."""
    args_type = get_args_type(sample)
    kwargs_type = get_kwargs_type(sample)
    return_type = get_return_type(sample)
    return args_type, kwargs_type, return_type  # type: ignore [return-value]
@@ -0,0 +1,116 @@
1
+ import functools
2
+ import inspect
3
+ from asyncio import PriorityQueue, Task, current_task, get_event_loop
4
+ from concurrent.futures import Executor
5
+ from hashlib import md5
6
+ from logging import getLogger
7
+ from os import makedirs
8
+ from os.path import exists, join
9
+ from pickle import dumps, load, loads
10
+ from random import random
11
+ from typing import Any, Callable, List, NoReturn
12
+
13
+ from a_sync import PruningThreadPoolExecutor
14
+ from a_sync._typing import P, T
15
+ from a_sync.asyncio import create_task
16
+ from a_sync.primitives.queue import log_broken
17
+ from aiofiles import open as _aio_open
18
+ from brownie import chain
19
+
20
# On-disk cache root, namespaced by chain id so caches for different
# networks never collide.
BASE_PATH = f"./cache/{chain.id}/"
_THREAD_NAME_PREFIX = "eth-portfolio-cache-decorator"
# Shared thread pool used only for `os.path.exists` checks, so blocking
# filesystem stats never run on the event loop.
_EXISTS_EXECUTOR = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-exists")
23
+
24
+
25
def cache_to_disk(fn: Callable[P, T]) -> Callable[P, T]:
    # sourcery skip: use-contextlib-suppress
    """Decorator that persists *fn*'s results to pickle files on disk.

    Works for both sync and async functions. The cache key is the md5 of
    the pickled call arguments; files live under
    ``./cache/<chain.id>/<module path>/<fn name>/``.

    Args:
        fn: The function whose results should be cached.

    Returns:
        A wrapper with the same signature that reads from / writes to disk.
    """
    name = fn.__name__
    cache_path_for_fn = f"{BASE_PATH}{fn.__module__.replace('.', '/')}/{name}"
    logger = getLogger(f"eth_portfolio.cache_to_disk.{name}")

    def get_cache_file_path(args, kwargs):
        # Create a unique filename based on the function arguments
        # NOTE: the payload is pickle even though the extension is `.json`.
        key = md5(dumps((args, sorted(kwargs.items())))).hexdigest()
        return join(cache_path_for_fn, f"{key}.json")

    # Dedicated write pool per decorated function so slow disks don't
    # serialize unrelated caches.
    write_executor = PruningThreadPoolExecutor(8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-write")

    makedirs(cache_path_for_fn, exist_ok=True)

    if inspect.iscoroutinefunction(fn):
        read_executor = PruningThreadPoolExecutor(
            8, f"{_THREAD_NAME_PREFIX}-{fn.__qualname__}-read"
        )

        # Pending cache reads as (priority, future, path, args, kwargs).
        queue = PriorityQueue()

        async def cache_deco_worker_coro(func) -> NoReturn:
            # Long-lived worker: pulls queued reads and resolves each
            # future with the unpickled file contents (or the read error).
            try:
                while True:
                    _, fut, cache_path, args, kwargs = await queue.get()
                    try:
                        async with _aio_open(cache_path, "rb", executor=read_executor) as f:
                            fut.set_result(loads(await f.read()))
                    except Exception as e:
                        fut.set_exception(e)
            except Exception as e:
                # Only reachable if the queue/future plumbing itself fails.
                logger.error("%s for %s is broken!!!", current_task(), func)
                logger.exception(e)
                raise

        workers: List[Task[NoReturn]] = []

        @functools.wraps(fn)
        async def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            if await _EXISTS_EXECUTOR.run(exists, cache_path):
                fut = get_event_loop().create_future()
                # we intentionally mix up the order to break up heavy load block ranges
                queue.put_nowait((random(), fut, cache_path, args, kwargs))
                # Lazily start the worker pool on the first cache hit.
                if not workers:
                    workers.extend(create_task(cache_deco_worker_coro(fn)) for _ in range(100))
                try:
                    return await fut
                except EOFError:
                    # Truncated/corrupt cache file: fall through and recompute.
                    pass

            async_result: T = await fn(*args, **kwargs)
            try:
                await __cache_write(cache_path, async_result, write_executor)
            except OSError as e:
                # I was having some weird issues in docker that I don't want to debug,
                # so I'm going to assume you have another means to let you know you're
                # out of disk space and will pass right on through here so my script
                # can continue
                if e.strerror != "No space left on device":
                    raise
            return async_result

    else:

        @functools.wraps(fn)
        def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            cache_path = get_cache_file_path(args, kwargs)
            try:
                with open(cache_path, "rb") as f:
                    return load(f)
            except (FileNotFoundError, EOFError):
                # Cache miss or corrupt file: recompute below.
                pass

            sync_result: T = fn(*args, **kwargs)  # type: ignore [assignment, return-value]
            try:
                # Fire-and-forget write if an event loop is running.
                create_task(
                    coro=__cache_write(cache_path, sync_result, write_executor),
                    skip_gc_until_done=True,
                )
            except RuntimeError:
                # No running event loop; skip caching rather than fail the call.
                pass
            return sync_result

    return disk_cache_wrap
111
+
112
+
113
async def __cache_write(cache_path: str, result: Any, executor: Executor) -> None:
    """Pickle *result* and write it to *cache_path* on *executor*'s threads."""
    serialized = dumps(result)
    async with _aio_open(cache_path, "wb", executor=executor) as f:
        await f.write(serialized)
@@ -0,0 +1,3 @@
1
import os

# Number of blocks of headroom kept behind the chain head — presumably to
# guard cached data against reorgs (TODO confirm against callers).
# Override via the REORG_BUFFER environment variable.
REORG_BUFFER = int(os.getenv("REORG_BUFFER", "30"))
File without changes
@@ -0,0 +1,147 @@
1
+ from asyncio import iscoroutinefunction
2
+ from asyncio import sleep as aio_sleep
3
+ from functools import wraps
4
+ from logging import DEBUG, getLogger
5
+ from random import random
6
+ from time import sleep as time_sleep
7
+ from typing import Callable, TypeVar
8
+
9
+ from a_sync._typing import AnyFn
10
+ from pony.orm import OperationalError, TransactionError
11
+ from typing_extensions import ParamSpec
12
+
13
+
14
# Type variables shared by the decorators below.
P = ParamSpec("P")
T = TypeVar("T")


logger = getLogger(__name__)
# Bind hot logger methods to module-level names so the retry loops below
# avoid repeated attribute lookups. NOTE: `logger._log` is a private
# logging API used to skip the isEnabledFor check already done manually.
__logger_is_enabled_for = logger.isEnabledFor
__logger_warning = logger.warning
__logger_log = logger._log
22
+
23
+
24
def break_locks(fn: AnyFn[P, T]) -> AnyFn[P, T]:
    """
    Decorator to handle database lock errors by retrying the function.

    This decorator is designed to wrap functions that interact with a database
    and may encounter `OperationalError` due to database locks. It will retry
    the function indefinitely if a "database is locked" error occurs. After
    5 attempts, a warning is logged, but the function will continue to retry
    until it succeeds or a non-lock-related error occurs.

    Args:
        fn: The function to be wrapped, which may be a coroutine or a regular function.

    Examples:
        Basic usage with a regular function:

        >>> @break_locks
        ... def my_function():
        ...     # Function logic that may encounter a database lock
        ...     pass

        Basic usage with an asynchronous function:

        >>> @break_locks
        ... async def my_async_function():
        ...     # Async function logic that may encounter a database lock
        ...     pass

    See Also:
        - :func:`pony.orm.db_session`: For managing database sessions.
    """
    if iscoroutinefunction(fn):

        @wraps(fn)
        async def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            # `None` means "not yet checked"; the DEBUG level is queried at
            # most once per call.
            debug_logs_enabled = None
            tries = 0
            while True:
                try:
                    return await fn(*args, **kwargs)
                except OperationalError as e:
                    # Only lock contention is retried; everything else raises.
                    if str(e) != "database is locked":
                        raise e

                    if debug_logs_enabled is None:
                        debug_logs_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_logs_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    # Randomized backoff that grows with the retry count.
                    await aio_sleep(tries * random())
                    tries += 1
                    if tries > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    else:

        @wraps(fn)
        def break_locks_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
            # Sync twin of the branch above; identical retry semantics with a
            # blocking sleep.
            debug_logs_enabled = None
            tries = 0
            while True:
                try:
                    return fn(*args, **kwargs)  # type: ignore [return-value]
                except OperationalError as e:
                    # Only lock contention is retried; everything else raises.
                    if str(e) != "database is locked":
                        raise e

                    if debug_logs_enabled is None:
                        debug_logs_enabled = __logger_is_enabled_for(DEBUG)

                    if debug_logs_enabled is True:
                        __logger_log(DEBUG, "%s.%s %s", (fn.__module__, fn.__name__, e))

                    # Randomized backoff that grows with the retry count.
                    time_sleep(tries * random())
                    tries += 1
                    if tries > 5:
                        __logger_warning("%s caught in err loop with %s", fn, e)

    return break_locks_wrap
104
+
105
+
106
def requery_objs_on_diff_tx_err(fn: Callable[P, T]) -> Callable[P, T]:
    """
    Decorator that retries *fn* when Pony reports mixed-transaction objects.

    Wraps a synchronous function and re-invokes it whenever it raises a
    ``TransactionError`` complaining that objects from different
    transactions were mixed. Any other error propagates unchanged.

    Args:
        fn: The function to be wrapped, which must not be a coroutine.

    Raises:
        TypeError: If the function is a coroutine.

    Examples:
        Basic usage with a function that may encounter transaction errors:

        >>> @requery_objs_on_diff_tx_err
        ... def my_function():
        ...     # Function logic that may encounter a transaction error
        ...     pass

    See Also:
        - :func:`pony.orm.db_session`: For managing database sessions.
    """
    if iscoroutinefunction(fn):
        raise TypeError(f"{fn} must not be async")

    mixed_tx_msg = "An attempt to mix objects belonging to different transactions"

    @wraps(fn)
    def requery_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
        while True:
            try:
                return fn(*args, **kwargs)
            except TransactionError as e:
                if str(e) != mixed_tx_msg:
                    raise e
                # The error occurs if you committed new objs to the db and
                # started a new transaction while still inside of a
                # `db_session`, and then tried to use the newly committed
                # objects in the next transaction. Now that the objects are
                # in the db this will not reoccur; the next iteration will
                # succeed.

    return requery_wrap
@@ -0,0 +1,204 @@
1
+ import typing
2
+ from functools import cached_property
3
+
4
+ from evmspec.structs.transaction import AccessListEntry
5
+ from hexbytes import HexBytes
6
+ from msgspec import json
7
+ from pony.orm import Optional, PrimaryKey, Required, Set, composite_key
8
+ from y._db.entities import Address, Block, Contract, DbEntity, Token
9
+
10
+ from eth_portfolio import structs
11
+ from eth_portfolio._decimal import Decimal
12
+
13
+
14
class BlockExtended(Block):
    """Extends the upstream ``Block`` entity with reverse relations to the
    transaction/transfer entities defined in this module."""

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions: Set["Transaction"]
        internal_transfers: Set["InternalTransfer"]
        token_transfers: Set["TokenTransfer"]

    # Lazy reverse sides of each entity's `block` attribute.
    transactions = Set("Transaction", lazy=True, reverse="block")
    internal_transfers = Set("InternalTransfer", lazy=True, reverse="block")
    token_transfers = Set("TokenTransfer", lazy=True, reverse="block")
24
+
25
+
26
class AddressExtended(Address):
    """Extends the upstream ``Address`` entity with reverse relations for
    every sent/received transaction and transfer recorded here."""

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transactions_sent: Set["Transaction"]
        transactions_received: Set["Transaction"]
        internal_transfers_sent: Set["InternalTransfer"]
        internal_transfers_received: Set["InternalTransfer"]
        token_transfers_sent: Set["TokenTransfer"]
        token_transfers_received: Set["TokenTransfer"]

    # Lazy reverse sides of the `from_address`/`to_address` attributes below.
    transactions_sent = Set("Transaction", lazy=True, reverse="from_address")
    transactions_received = Set("Transaction", lazy=True, reverse="to_address")
    internal_transfers_sent = Set("InternalTransfer", lazy=True, reverse="from_address")
    internal_transfers_received = Set("InternalTransfer", lazy=True, reverse="to_address")
    token_transfers_sent = Set("TokenTransfer", lazy=True, reverse="from_address")
    token_transfers_received = Set("TokenTransfer", lazy=True, reverse="to_address")
42
+
43
+
44
class ContractExtended(Contract, AddressExtended):
    """Contract entity that also carries the extended address relations."""

    pass
46
+
47
+
48
class TokenExtended(Token, AddressExtended):
    """Token entity that also carries the extended address relations plus a
    reverse relation to its transfers."""

    if typing.TYPE_CHECKING:
        # if we execute this code we get `TypeError: 'type' object is not subscriptable`
        transfers: Set["TokenTransfer"]

    # Lazy reverse side of `TokenTransfer.token`.
    transfers = Set("TokenTransfer", lazy=True, reverse="token")
54
+
55
+
56
class Transaction(DbEntity):
    """Database entity for an on-chain transaction.

    Frequently queried fields are materialized as columns; the complete
    struct is stored encoded in ``raw`` and decoded lazily via ``decoded``.
    """

    _id = PrimaryKey(int, auto=True)
    # Chain position of the transaction.
    block = Required(BlockExtended, lazy=True, reverse="transactions")
    transaction_index = Required(int, lazy=True)
    hash = Required(str, index=True, lazy=True)
    from_address = Required(AddressExtended, index=True, lazy=True, reverse="transactions_sent")
    # Optional: contract deployments have no `to` address.
    to_address = Optional(AddressExtended, index=True, lazy=True, reverse="transactions_received")
    # Decimal(38, 18): wei-scale values with 18 decimal places.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)

    nonce = Required(int, lazy=True)
    type = Optional(int, lazy=True)
    gas = Required(Decimal, 38, 1, lazy=True)
    gas_price = Required(Decimal, 38, 1, lazy=True)
    # EIP-1559 fee fields; absent on legacy transactions.
    max_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)
    max_priority_fee_per_gas = Optional(Decimal, 38, 1, lazy=True)

    # (block, index) uniquely locates a transaction on chain.
    composite_key(block, transaction_index)

    # Encoded struct payload; source of truth for the properties below.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.Transaction:
        # Decoded once per entity instance, then cached.
        return json.decode(self.raw, type=structs.Transaction)

    # NOTE: the bare `...__doc__` expressions in the properties below are
    # no-ops at runtime; presumably they exist so doc tooling links each
    # property to the struct field's docstring — TODO confirm.
    @property
    def input(self) -> HexBytes:
        structs.Transaction.input.__doc__
        return self.decoded.input

    @property
    def r(self) -> HexBytes:
        structs.Transaction.r.__doc__
        return self.decoded.r

    @property
    def s(self) -> HexBytes:
        structs.Transaction.s.__doc__
        return self.decoded.s

    @property
    def v(self) -> int:
        structs.Transaction.v.__doc__
        return self.decoded.v

    @property
    def access_list(self) -> typing.List[AccessListEntry]:
        structs.Transaction.access_list.__doc__
        return self.decoded.access_list

    @property
    def y_parity(self) -> typing.Optional[int]:
        # NOTE(review): references `structs.TokenTransfer` where the siblings
        # use `structs.Transaction` — looks like a copy-paste slip, but the
        # expression is a no-op so behavior is unaffected.
        structs.TokenTransfer.y_parity.__doc__
        return self.decoded.y_parity
111
+
112
+
113
class InternalTransfer(DbEntity):
    """Database entity for an internal (trace-level) value transfer.

    Common columns mirror ``Transaction``; trace-specific columns follow.
    The full struct is stored encoded in ``raw``.
    """

    _id = PrimaryKey(int, auto=True)

    # common
    block = Required(BlockExtended, lazy=True, reverse="internal_transfers")
    transaction_index = Required(int, lazy=True)
    hash = Required(str, lazy=True)
    from_address = Required(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_sent"
    )
    to_address = Optional(
        AddressExtended, index=True, lazy=True, reverse="internal_transfers_received"
    )
    # Decimal(38, 18): wei-scale values with 18 decimal places.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)

    # unique
    type = Required(str, lazy=True)
    call_type = Required(str, lazy=True)
    trace_address = Required(str, lazy=True)
    gas = Required(Decimal, 38, 1, lazy=True)
    gas_used = Optional(Decimal, 38, 1, lazy=True)

    # No single trace id exists, so uniqueness spans the full column set.
    composite_key(
        block,
        transaction_index,
        hash,
        from_address,
        to_address,
        value,
        type,
        call_type,
        trace_address,
        gas,
        gas_used,
    )

    # Encoded struct payload; source of truth for the properties below.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.InternalTransfer:
        # The bare `__doc__` expression is a no-op kept from the original;
        # decoding happens once per entity instance, then is cached.
        structs.InternalTransfer.__doc__
        return json.decode(self.raw, type=structs.InternalTransfer)

    @property
    def code(self) -> HexBytes:
        structs.InternalTransfer.code.__doc__
        return self.decoded.code

    @property
    def input(self) -> HexBytes:
        structs.InternalTransfer.input.__doc__
        return self.decoded.input

    @property
    def output(self) -> HexBytes:
        structs.InternalTransfer.output.__doc__
        return self.decoded.output

    @property
    def subtraces(self) -> int:
        structs.InternalTransfer.subtraces.__doc__
        return self.decoded.subtraces
178
+
179
class TokenTransfer(DbEntity):
    """Database entity for a token (log-level) transfer event.

    Common columns mirror ``Transaction``; the log index and token columns
    are specific to transfers. The full struct is stored encoded in ``raw``.
    """

    _id = PrimaryKey(int, auto=True)

    # common
    block = Required(BlockExtended, lazy=True, reverse="token_transfers")
    transaction_index = Required(int, lazy=True)
    hash = Required(str, lazy=True)
    from_address = Required(AddressExtended, index=True, lazy=True, reverse="token_transfers_sent")
    to_address = Required(
        AddressExtended, index=True, lazy=True, reverse="token_transfers_received"
    )
    # Decimal(38, 18): token amounts with 18 decimal places.
    value = Required(Decimal, 38, 18, lazy=True)
    price = Optional(Decimal, 38, 18, lazy=True)
    value_usd = Optional(Decimal, 38, 18, lazy=True)

    # unique
    log_index = Required(int, lazy=True)
    token = Optional(TokenExtended, index=True, lazy=True, reverse="transfers")

    # (block, tx index, log index) uniquely locates a log on chain.
    composite_key(block, transaction_index, log_index)

    # Encoded struct payload; decoded lazily below.
    raw = Required(bytes, lazy=True)

    @cached_property
    def decoded(self) -> structs.TokenTransfer:
        # Decoded once per entity instance, then cached.
        return json.decode(self.raw, type=structs.TokenTransfer)