eth-portfolio-temp 0.2.12 (eth_portfolio_temp-0.2.12-cp313-cp313-win32.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of eth-portfolio-temp might be problematic.
- eth_portfolio/__init__.py +25 -0
- eth_portfolio/_argspec.cp313-win32.pyd +0 -0
- eth_portfolio/_argspec.py +42 -0
- eth_portfolio/_cache.py +121 -0
- eth_portfolio/_config.cp313-win32.pyd +0 -0
- eth_portfolio/_config.py +4 -0
- eth_portfolio/_db/__init__.py +0 -0
- eth_portfolio/_db/decorators.py +147 -0
- eth_portfolio/_db/entities.py +311 -0
- eth_portfolio/_db/utils.py +604 -0
- eth_portfolio/_decimal.py +156 -0
- eth_portfolio/_decorators.py +84 -0
- eth_portfolio/_exceptions.py +67 -0
- eth_portfolio/_ledgers/__init__.py +0 -0
- eth_portfolio/_ledgers/address.py +938 -0
- eth_portfolio/_ledgers/portfolio.py +327 -0
- eth_portfolio/_loaders/__init__.py +33 -0
- eth_portfolio/_loaders/_nonce.cp313-win32.pyd +0 -0
- eth_portfolio/_loaders/_nonce.py +196 -0
- eth_portfolio/_loaders/balances.cp313-win32.pyd +0 -0
- eth_portfolio/_loaders/balances.py +94 -0
- eth_portfolio/_loaders/token_transfer.py +217 -0
- eth_portfolio/_loaders/transaction.py +240 -0
- eth_portfolio/_loaders/utils.cp313-win32.pyd +0 -0
- eth_portfolio/_loaders/utils.py +68 -0
- eth_portfolio/_shitcoins.cp313-win32.pyd +0 -0
- eth_portfolio/_shitcoins.py +329 -0
- eth_portfolio/_stableish.cp313-win32.pyd +0 -0
- eth_portfolio/_stableish.py +42 -0
- eth_portfolio/_submodules.py +73 -0
- eth_portfolio/_utils.py +225 -0
- eth_portfolio/_ydb/__init__.py +0 -0
- eth_portfolio/_ydb/token_transfers.py +145 -0
- eth_portfolio/address.py +397 -0
- eth_portfolio/buckets.py +194 -0
- eth_portfolio/constants.cp313-win32.pyd +0 -0
- eth_portfolio/constants.py +82 -0
- eth_portfolio/portfolio.py +661 -0
- eth_portfolio/protocols/__init__.py +67 -0
- eth_portfolio/protocols/_base.py +108 -0
- eth_portfolio/protocols/convex.py +17 -0
- eth_portfolio/protocols/dsr.py +51 -0
- eth_portfolio/protocols/lending/README.md +6 -0
- eth_portfolio/protocols/lending/__init__.py +50 -0
- eth_portfolio/protocols/lending/_base.py +57 -0
- eth_portfolio/protocols/lending/compound.py +187 -0
- eth_portfolio/protocols/lending/liquity.py +110 -0
- eth_portfolio/protocols/lending/maker.py +104 -0
- eth_portfolio/protocols/lending/unit.py +46 -0
- eth_portfolio/protocols/liquity.py +16 -0
- eth_portfolio/py.typed +0 -0
- eth_portfolio/structs/__init__.py +43 -0
- eth_portfolio/structs/modified.py +69 -0
- eth_portfolio/structs/structs.py +637 -0
- eth_portfolio/typing/__init__.py +1447 -0
- eth_portfolio/typing/balance/single.py +176 -0
- eth_portfolio__mypyc.cp313-win32.pyd +0 -0
- eth_portfolio_scripts/__init__.py +20 -0
- eth_portfolio_scripts/_args.py +26 -0
- eth_portfolio_scripts/_logging.py +15 -0
- eth_portfolio_scripts/_portfolio.py +194 -0
- eth_portfolio_scripts/_utils.py +106 -0
- eth_portfolio_scripts/balances.cp313-win32.pyd +0 -0
- eth_portfolio_scripts/balances.py +52 -0
- eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
- eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
- eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
- eth_portfolio_scripts/docker/__init__.cp313-win32.pyd +0 -0
- eth_portfolio_scripts/docker/__init__.py +16 -0
- eth_portfolio_scripts/docker/check.cp313-win32.pyd +0 -0
- eth_portfolio_scripts/docker/check.py +56 -0
- eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
- eth_portfolio_scripts/docker/docker_compose.cp313-win32.pyd +0 -0
- eth_portfolio_scripts/docker/docker_compose.py +78 -0
- eth_portfolio_scripts/main.py +119 -0
- eth_portfolio_scripts/py.typed +1 -0
- eth_portfolio_scripts/victoria/__init__.py +73 -0
- eth_portfolio_scripts/victoria/types.py +38 -0
- eth_portfolio_temp-0.2.12.dist-info/METADATA +25 -0
- eth_portfolio_temp-0.2.12.dist-info/RECORD +83 -0
- eth_portfolio_temp-0.2.12.dist-info/WHEEL +5 -0
- eth_portfolio_temp-0.2.12.dist-info/entry_points.txt +2 -0
- eth_portfolio_temp-0.2.12.dist-info/top_level.txt +3 -0

eth_portfolio/_ledgers/portfolio.py
@@ -0,0 +1,327 @@
+import logging
+from typing import TYPE_CHECKING, AsyncIterator, Dict, Generic, Optional, TypeVar
+
+import a_sync
+from pandas import DataFrame, concat  # type: ignore
+from y.datatypes import Address, Block
+
+from eth_portfolio._decorators import set_end_block_if_none
+from eth_portfolio._ledgers.address import (
+    AddressLedgerBase,
+    InternalTransfersList,
+    TokenTransfersList,
+    TransactionsList,
+    _LedgerEntryList,
+)
+from eth_portfolio._utils import _AiterMixin
+from eth_portfolio.structs import InternalTransfer, TokenTransfer, Transaction
+
+if TYPE_CHECKING:
+    from eth_portfolio.portfolio import Portfolio
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+
+
+class PortfolioLedgerBase(a_sync.ASyncGenericBase, _AiterMixin[T], Generic[_LedgerEntryList, T]):
+    property_name: str
+    object_caches: Dict[Address, AddressLedgerBase[_LedgerEntryList, T]]
+
+    def __init__(self, portfolio: "Portfolio"):  # type: ignore
+        assert hasattr(self, "property_name"), "Subclasses must define a property_name"
+        self.object_caches = {
+            address.address: getattr(address, self.property_name) for address in portfolio
+        }
+        self.portfolio = portfolio
+        super().__init__()
+
+    @property
+    def _start_block(self) -> int:
+        """Returns the start block for analysis of this portfolio."""
+        return self.portfolio._start_block
+
+    def _get_and_yield(self, start_block: int, end_block: int, mem_cache: bool) -> AsyncIterator[T]:
+        """
+        Asynchronously yields ledger entries for each address in the portfolio for the specified block range.
+
+        This method is crucial for efficient data retrieval across multiple addresses, as it:
+        - Utilizes asynchronous iteration to process addresses concurrently.
+        - Reduces memory usage by yielding entries one at a time.
+
+        Args:
+            start_block: The starting block for the ledger query.
+            end_block: The ending block for the ledger query.
+
+        Yields:
+            Individual ledger entries of type T for each address in the portfolio.
+
+        Example:
+            >>> async for entry in ledger._get_and_yield(start_block=1000000, end_block=1100000):
+            ...     print(entry)
+        """
+        aiterators = [
+            getattr(address, self.property_name)._get_and_yield(start_block, end_block, mem_cache)
+            for address in self.portfolio
+        ]
+        return a_sync.as_yielded(*aiterators)
+
+    @property
+    def asynchronous(self) -> bool:
+        """Returns `True` if the portfolio associated with this ledger is asynchronous, `False` if not."""
+        return self.portfolio.asynchronous
+
+    @set_end_block_if_none
+    async def get(self, start_block: Block, end_block: Block) -> Dict[Address, _LedgerEntryList]:
+        """
+        Fetches ledger entries for all portfolio addresses within the specified block range.
+
+        Args:
+            start_block: The starting block number for the query.
+            end_block: The ending block number for the query.
+
+        Returns:
+            A dictionary mapping each portfolio address to its corresponding ledger entries within the specified block range.
+
+        Note:
+            The @set_end_block_if_none decorator ensures that if end_block is not provided,
+            it defaults to the latest block.
+
+        Example:
+            >>> ledger = PortfolioTransactionsLedger(portfolio=portfolio)
+            >>> ledger_entries = await ledger.get(start_block=1000000, end_block=1100000)
+            >>> print("\n".join(f"Address {addr}: {len(entries)} entries" for addr, entries in ledger_entries.items()))
+        """
+        coros = {
+            address: cache.get(start_block, end_block, sync=False)
+            for address, cache in self.object_caches.items()
+        }
+        return await a_sync.gather(coros)
+
+    @set_end_block_if_none
+    async def df(self, start_block: Block, end_block: Block) -> DataFrame:
+        """
+        Returns a DataFrame containing all entries for this ledger.
+
+        This method provides an easy way to access your data in a standardized, clean format
+        for further analysis and reporting.
+
+        NOTE: Subclasses may override this method for type-specific DataFrame processing.
+
+        Args:
+            start_block: The starting block for the query.
+            end_block: The ending block for the query.
+
+        Returns:
+            A DataFrame containing processed ledger entries.
+
+        Example:
+            >>> df = await ledger.df(start_block=1000000, end_block=1100000)
+            >>> print(df)
+        """
+        df = await self._df_base(start_block, end_block)
+        if len(df) > 0:
+            df = self._cleanup_df(df)
+        return df
+
+    async def sent(
+        self, start_block: Optional[Block] = None, end_block: Optional[Block] = None
+    ) -> AsyncIterator[T]:
+        portfolio_addresses = set(self.portfolio.addresses.keys())
+        async for obj in self[start_block:end_block]:
+            if (
+                obj.from_address in portfolio_addresses
+                and obj.to_address not in portfolio_addresses
+            ):
+                yield obj
+
+    async def received(
+        self, start_block: Optional[Block] = None, end_block: Optional[Block] = None
+    ) -> AsyncIterator[T]:
+        portfolio_addresses = set(self.portfolio.addresses.keys())
+        async for obj in self[start_block:end_block]:
+            if (
+                obj.to_address in portfolio_addresses
+                and obj.from_address not in portfolio_addresses
+            ):
+                yield obj
+
+    async def _df_base(self, start_block: Block, end_block: Block) -> DataFrame:
+        """
+        Fetches and concatenates raw ledger data into a :class:`~DataFrame` for all addresses in the portfolio.
+
+        This method is a crucial part of the data processing pipeline, as it:
+        - Retrieves ledger entries for the specified block range across all portfolio addresses.
+        - Combines the data from multiple addresses into a single DataFrame.
+        - Serves as the foundation for further data cleaning and analysis.
+
+        Args:
+            start_block: The starting block number for the query.
+            end_block: The ending block number for the query.
+
+        Returns:
+            DataFrame: A concatenated DataFrame containing raw ledger entries from all addresses.
+
+        Example:
+            >>> df_base = await ledger._df_base(start_block=1000000, end_block=1100000)
+            >>> print(f"Total entries: {len(df_base)}", df_base.head(), sep="\n")
+
+        Note:
+            This method returns raw data that may contain duplicates or require further processing.
+            For cleaned and deduplicated data, use the `df()` method instead.
+        """
+        data: Dict[Address, _LedgerEntryList] = await self.get(start_block, end_block, sync=False)
+        return concat(pandable.df for pandable in data.values())
+
+    @classmethod
+    def _deduplicate_df(cls, df: DataFrame) -> DataFrame:
+        """
+        Deduplicate the DataFrame to prevent double-counting of transfers within the portfolio.
+
+        This method is crucial for ensuring accurate portfolio analysis by removing duplicate entries
+        where transfers between owned addresses appear once in each result set.
+
+        Note:
+            - This method can be overridden in subclasses if needed.
+            - If the DataFrame contains columns with list-type values, they are converted to strings for comparison.
+
+        Args:
+            df: The DataFrame to deduplicate.
+
+        Returns:
+            A deduplicated version of the input DataFrame.
+
+        Example:
+            >>> original_df = pd.DataFrame(...)  # Your original DataFrame
+            >>> deduped_df = PortfolioLedgerBase._deduplicate_df(original_df)
+            >>> print(f"Original rows: {len(original_df)}, Deduplicated rows: {len(deduped_df)}")
+        """
+        return df.loc[df.astype(str).drop_duplicates().index]
+
+    @classmethod
+    def _cleanup_df(cls, df: DataFrame) -> DataFrame:
+        """
+        Cleans up the DataFrame by deduplicating and sorting it by block number.
+
+        Args:
+            df: The DataFrame to clean up.
+
+        Returns:
+            A cleaned and deduplicated DataFrame sorted by block number.
+
+        Example:
+            >>> cleaned_df = ledger._cleanup_df(df)
+            >>> print(cleaned_df)
+        """
+        df = cls._deduplicate_df(df)
+        return df.sort_values(["blockNumber"]).reset_index(drop=True)
+
+
+class PortfolioTransactionsLedger(PortfolioLedgerBase[TransactionsList, Transaction]):
+    """
+    The :class:`~eth_portfolio._ledgers.PortfolioTransactionsLedger` class manages Ethereum
+    transaction entries across all addresses in a portfolio. It aggregates and processes
+    transactions for the entire portfolio within specified block ranges.
+
+    In the eth-portfolio ecosystem, this class is essential for:
+    - Providing a comprehensive view of all Ethereum transactions across multiple addresses.
+    - Supporting portfolio-wide analysis and reporting of transaction history.
+    - Enabling efficient querying of transaction data for specific time periods or block ranges.
+
+    Example:
+        >>> ledger = PortfolioTransactionsLedger(portfolio=Portfolio(addresses=["0x1234...", "0xABCD..."]))
+        >>> df = await ledger.df(start_block=10000000, end_block=12000000)
+        >>> print(df)
+    """
+
+    property_name = "transactions"
+
+
+class PortfolioTokenTransfersLedger(PortfolioLedgerBase[TokenTransfersList, TokenTransfer]):
+    """
+    The :class:`~eth_portfolio._ledgers.PortfolioTokenTransfersLedger` class manages ERC20 token
+    transfer entries across all addresses in a portfolio. It aggregates and processes
+    token transfers for the entire portfolio within specified block ranges.
+
+    In the eth-portfolio ecosystem, this class is crucial for:
+    - Tracking all ERC20 token movements across multiple addresses in a portfolio.
+    - Facilitating token balance calculations and portfolio valuation.
+    - Supporting analysis of token transfer patterns and history.
+
+    Example:
+        >>> ledger = PortfolioTokenTransfersLedger(portfolio=Portfolio(addresses=["0x1234...", "0xABCD..."]))
+        >>> df = await ledger.df(start_block=10000000, end_block=12000000)
+        >>> print(df)
+    """
+
+    property_name = "token_transfers"
+
+
+class PortfolioInternalTransfersLedger(
+    PortfolioLedgerBase[InternalTransfersList, InternalTransfer]
+):
+    """
+    The :class:`~eth_portfolio._ledgers.PortfolioInternalTransfersLedger` class manages internal
+    transfer entries across all addresses in a portfolio. It aggregates and processes internal transfers
+    for the entire portfolio within specified block ranges.
+
+    In the eth-portfolio ecosystem, this class plays a vital role in:
+    - Tracking internal Ethereum transfers between addresses within the same portfolio.
+    - Providing insights into complex transactions that involve multiple internal transfers.
+    - Supporting accurate portfolio analysis of internal transfer data from EVM traces.
+
+    Example:
+        >>> ledger = PortfolioInternalTransfersLedger(portfolio=Portfolio(addresses=["0x1234...", "0xABCD..."]))
+        >>> df = await ledger.df(start_block=10000000, end_block=12000000)
+        >>> print(df)
+    """
+
+    property_name = "internal_transfers"
+
+    @set_end_block_if_none
+    async def df(self, start_block: Block, end_block: Block) -> DataFrame:
+        """
+        Returns a DataFrame containing all internal transfers to or from any of the addresses in the portfolio.
+
+        Args:
+            start_block: The starting block for the query.
+            end_block: The ending block for the query.
+
+        Returns:
+            A DataFrame containing processed internal transfer entries.
+
+        Example:
+            >>> df = await ledger.df(start_block=10000000, end_block=12000000)
+            >>> print(df)
+        """
+        df = await self._df_base(start_block, end_block)
+        if len(df) > 0:
+            df.rename(
+                columns={"transactionHash": "hash", "transactionPosition": "transactionIndex"},
+                inplace=True,
+            )
+            df = self._cleanup_df(df)
+        return df
+
+    @classmethod
+    def _deduplicate_df(cls, df: DataFrame) -> DataFrame:
+        """
+        Deduplicates the DataFrame.
+
+        This deduplication is essential for ensuring accurate portfolio analysis by removing
+        duplicate entries due to transfers between addresses within the same portfolio.
+
+        Args:
+            df: The DataFrame to deduplicate.
+
+        Returns:
+            A deduplicated DataFrame.
+
+        Example:
+            >>> deduped_df = PortfolioInternalTransfersLedger._deduplicate_df(df)
+            >>> print(deduped_df)
+        """
+        df = df.reset_index(drop=True)
+        # We cant use drop_duplicates when one of the columns, `traceAddress`, contains lists.
+        # We must first convert the lists to strings
+        return df.loc[df.astype(str).drop_duplicates().index]
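
For orientation, here is a minimal usage sketch of the ledger classes added above. It is illustrative only: the addresses are placeholders taken from the docstring examples, and it assumes an RPC endpoint is already configured for the underlying dank_mids / ypricemagic stack and that the portfolio is used from async code, as in those docstrings.

import asyncio

from eth_portfolio._ledgers.portfolio import PortfolioTransactionsLedger
from eth_portfolio.portfolio import Portfolio


async def main() -> None:
    # Placeholder addresses; substitute real checksummed addresses before running.
    portfolio = Portfolio(addresses=["0x1234...", "0xABCD..."])
    ledger = PortfolioTransactionsLedger(portfolio=portfolio)

    # Per-address entry lists, keyed by portfolio address.
    per_address = await ledger.get(start_block=10000000, end_block=12000000)
    for addr, entries in per_address.items():
        print(f"Address {addr}: {len(entries)} entries")

    # Outbound transactions only: sent from a portfolio address to an outside address.
    async for tx in ledger.sent(start_block=10000000, end_block=12000000):
        print(tx)

    # Deduplicated DataFrame sorted by blockNumber, across all portfolio addresses.
    df = await ledger.df(start_block=10000000, end_block=12000000)
    print(df.head())


asyncio.run(main())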

eth_portfolio/_loaders/__init__.py
@@ -0,0 +1,33 @@
+"""
+This module initializes the `_loaders` package within the `eth_portfolio` library.
+It imports key functions responsible for loading blockchain data related to transactions
+and token transfers for use within the package.
+
+The functions imported here are designed to facilitate the retrieval and processing of
+Ethereum blockchain data, enabling efficient data handling and storage for portfolio analysis.
+
+Imported Functions:
+    - :func:`~eth_portfolio._loaders.transaction.load_transaction`:
+      Loads transaction data by address and nonce, with optional price data retrieval.
+    - :func:`~eth_portfolio._loaders.token_transfer.load_token_transfer`:
+      Processes and loads token transfer data from log entries, with optional price fetching.
+
+Examples:
+    These functions can be used to load and process blockchain data for portfolio analysis.
+    For example, you might use them as follows:
+
+    >>> from eth_portfolio._loaders import load_transaction, load_token_transfer
+    >>> nonce, transaction = await load_transaction(address="0x1234567890abcdef1234567890abcdef12345678", nonce=5, load_prices=True)
+    >>> print(transaction)
+
+    >>> transfer_log = {"address": "0xTokenAddress", "data": "0xData", "removed": False}
+    >>> token_transfer = await load_token_transfer(transfer_log, load_prices=True)
+    >>> print(token_transfer)
+
+See Also:
+    - :mod:`eth_portfolio._loaders.transaction`: Contains functions for loading transaction data.
+    - :mod:`eth_portfolio._loaders.token_transfer`: Contains functions for processing token transfer logs.
+"""
+
+from eth_portfolio._loaders.transaction import load_transaction
+from eth_portfolio._loaders.token_transfer import load_token_transfer
Binary file: eth_portfolio/_loaders/_nonce.cp313-win32.pyd (compiled extension, no text diff)

eth_portfolio/_loaders/_nonce.py
@@ -0,0 +1,196 @@
+import asyncio
+import logging
+from collections import defaultdict
+from time import time
+from typing import ClassVar, DefaultDict, Dict, Final, Optional, Tuple, final
+
+import a_sync
+import dank_mids
+from eth_typing import BlockNumber, ChecksumAddress
+
+from eth_portfolio._loaders import utils
+
+
+logger: Final = logging.getLogger("eth_portfolio.nonces")
+logger_is_enabled: Final = logger.isEnabledFor
+__log: Final = logger._log
+
+DEBUG: Final = logging.DEBUG
+
+Nonce = int
+AccountNonces = DefaultDict[Nonce, BlockNumber]
+GlobalNonces = DefaultDict[ChecksumAddress, AccountNonces]
+
+nonces: Final[GlobalNonces] = defaultdict(lambda: defaultdict(int))  # type: ignore [arg-type]
+locks: Final[DefaultDict[ChecksumAddress, asyncio.Lock]] = defaultdict(asyncio.Lock)
+
+get_transaction_count: Final = dank_mids.eth.get_transaction_count
+
+igather: Final = a_sync.igather
+
+now: Final = time
+
+
+async def get_nonce_at_block(address: ChecksumAddress, block: BlockNumber) -> int:
+    """
+    Retrieves the nonce of an address at a specific block.
+
+    This function gets the transaction count (nonce) for the given address at the specified block. It also includes a special case to handle known issues on certain networks like Arbitrum.
+
+    Args:
+        address: The address of the account.
+        block: The block number at which to retrieve the nonce.
+
+    Returns:
+        The nonce of the address at the given block.
+
+    Example:
+        >>> block = 12345678
+        >>> nonce = await get_nonce_at_block("0x1234567890abcdef1234567890abcdef12345678", block)
+        >>> print(f"The nonce at block {block} is {nonce}.")
+
+    """
+    try:
+        nonce = await get_transaction_count(address, block_identifier=block) - 1
+        _update_nonces(address, nonce, block)
+        return nonce
+    except ValueError as e:
+        # NOTE this is known to occur on Arbitrum
+        if "error creating execution cursor" in str(e) and block == 0:
+            return -1
+        raise ValueError(f"For {address} at {block}: {e}") from e
+
+
+async def get_block_for_nonce(address: ChecksumAddress, nonce: Nonce) -> int:
+    highest_known_nonce_lt_query: Optional[int]
+    lowest_known_nonce_gt_query: Optional[int]
+
+    async with locks[address]:
+        highest_known_nonce_lt_query = None
+        lowest_known_nonce_gt_query = None
+
+        # it is impossible for n to == nonce
+        for n in nonces[address]:
+            if n < nonce:
+                if highest_known_nonce_lt_query is None or n > highest_known_nonce_lt_query:
+                    highest_known_nonce_lt_query = n
+            elif n == nonce:
+                continue
+            elif lowest_known_nonce_gt_query is None or n < lowest_known_nonce_gt_query:
+                lowest_known_nonce_gt_query = n
+
+        if highest_known_nonce_lt_query is not None:
+            lo = nonces[address][highest_known_nonce_lt_query]
+        else:
+            lo = BlockNumber(0)
+
+        if lowest_known_nonce_gt_query is not None:
+            hi = nonces[address][lowest_known_nonce_gt_query]
+        else:
+            hi = await get_block_number()
+
+        # lets find the general area first before we proceed with our binary search
+        range_size = hi - lo + 1
+        if range_size > 4:
+            lo, hi = await _get_area(address, nonce, lo, hi, range_size)
+
+        debug_logs_enabled = logger_is_enabled(DEBUG)
+        while True:
+            _nonce = await get_nonce_at_block(address, lo)
+
+            if _nonce < nonce:
+                old_lo = lo
+                lo += int((hi - lo) / 2) or 1  # type: ignore [assignment]
+                if debug_logs_enabled:
+                    __log(
+                        DEBUG,
+                        "Nonce for %s at %s is %s, checking higher block %s",
+                        (address, old_lo, _nonce, lo),
+                    )
+                continue
+
+            prev_block_nonce: int = await get_nonce_at_block(address, lo - 1)  # type: ignore [arg-type]
+            if prev_block_nonce >= nonce:
+                hi = lo
+                lo = int(lo / 2)  # type: ignore [assignment]
+                if debug_logs_enabled:
+                    __log(
+                        DEBUG,
+                        "Nonce for %s at %s is %s, checking lower block %s",
+                        (address, hi, _nonce, lo),
+                    )
+                continue
+
+            if debug_logs_enabled:
+                __log(DEBUG, "Found nonce %s for %s at block %s", (nonce, address, lo))
+
+            return lo
+
+
+async def _get_area(
+    address: ChecksumAddress,
+    nonce: Nonce,
+    lo: BlockNumber,
+    hi: BlockNumber,
+    range_size: int,
+) -> Tuple[BlockNumber, BlockNumber]:
+    num_chunks = _get_num_chunks(range_size)
+    chunk_size = range_size // num_chunks
+    points = [BlockNumber(lo + i * chunk_size) for i in range(num_chunks)]
+    nonces = await igather(get_nonce_at_block(address, point) for point in points)
+    for block, n in zip(points, nonces):
+        if n >= nonce:
+            return lo, block
+        lo = block
+    return lo, hi
+
+
+def _update_nonces(address: ChecksumAddress, nonce: Nonce, block: BlockNumber) -> None:
+    # if you are searching for `nonce` and you verified it occurs AT or ABOVE `block` call this fn.
+    if block > nonces[address][nonce]:
+        nonces[address][nonce] = block
+
+
+def _get_num_chunks(range_size: int) -> int:
+    if range_size >= 4096:
+        return 100
+    elif range_size >= 2048:
+        return 80
+    elif range_size >= 1024:
+        return 40
+    elif range_size >= 512:
+        return 20
+    elif range_size >= 256:
+        return 10
+    elif range_size >= 128:
+        return 8
+    elif range_size >= 64:
+        return 6
+    elif range_size >= 32:
+        return 5
+    elif range_size >= 16:
+        return 4
+    elif range_size >= 8:
+        return 3
+    else:
+        return 2
+
+
+@final
+class BlockCache:
+    block: ClassVar[BlockNumber] = 0
+    updated_at: ClassVar = 0.0
+    lock: Final = asyncio.Lock()
+    ttl: Final = 5.0
+
+
+async def get_block_number() -> BlockNumber:
+    if now() - BlockCache.updated_at < BlockCache.ttl:
+        return BlockCache.block
+    async with BlockCache.lock:
+        if now() - BlockCache.updated_at < BlockCache.ttl:
+            return BlockCache.block
+        ts = now()
+        block = BlockCache.block = await dank_mids.eth.block_number
+        BlockCache.updated_at = ts
+        return block
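
The nonce helpers above use a two-phase lookup: `_get_area` does a coarse chunked scan (with `_get_num_chunks` choosing the chunk count) to shrink the candidate window, and `get_block_for_nonce` then refines within that window, caching every observed nonce in `nonces` so later queries start from a tighter range. The standalone sketch below restates that strategy over any non-decreasing lookup function; it is illustrative only and uses a textbook binary search rather than the package's exact loop.

def find_first_index_at_or_above(lookup, target, lo, hi, num_chunks=8):
    """Return the smallest x in [lo, hi] with lookup(x) >= target.

    `lookup` must be non-decreasing, like an account's nonce over block numbers,
    and such an x is assumed to exist within [lo, hi].
    """
    # Phase 1: coarse chunked scan, analogous to _get_area, to narrow the window cheaply.
    if hi - lo + 1 > num_chunks:
        step = (hi - lo + 1) // num_chunks
        for point in (lo + i * step for i in range(num_chunks)):
            if lookup(point) >= target:
                hi = point
                break
            lo = point
    # Phase 2: binary search refinement within the narrowed window.
    while lo < hi:
        mid = (lo + hi) // 2
        if lookup(mid) >= target:
            hi = mid
        else:
            lo = mid + 1
    return lo


# Toy data: the "nonce" observed at each of 25 consecutive blocks.
history = [0] * 10 + [3] * 5 + [7] * 10
assert find_first_index_at_or_above(history.__getitem__, 3, 0, len(history) - 1) == 10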
Binary file: eth_portfolio/_loaders/balances.cp313-win32.pyd (compiled extension, no text diff)

eth_portfolio/_loaders/balances.py
@@ -0,0 +1,94 @@
+import logging
+from decimal import InvalidOperation
+from typing import Final
+
+import y
+from y._decorators import stuck_coro_debugger
+from y.datatypes import Address, Block
+
+from eth_portfolio._decimal import Decimal
+from eth_portfolio._utils import _get_price
+from eth_portfolio.typing import Balance
+
+
+_ZERO: Final = Decimal(0)
+
+logger: Final = logging.getLogger(__name__)
+
+
+@stuck_coro_debugger
+async def load_token_balance(token: y.ERC20, address: Address, block: Block) -> Balance:
+    """
+    Asynchronously fetch the ERC20 token balance and its USD value for a given address at a specific block.
+
+    Args:
+        token: The ERC20 token contract to query.
+        address: The address holding the ERC20 tokens.
+        block: The block number for the balance query.
+
+    Returns:
+        :class:`~eth_portfolio.typing.Balance`: A custom object containing:
+        - balance: The token balance (in token's smallest unit).
+        - value: The USD value of the balance (18 decimal places).
+        - token: The ERC20 token which was checked.
+        - block: The block number where the balance was taken.
+
+    Note:
+        Non-standard ERC20 tokens are handled gracefully, returning a zero balance.
+
+    Example:
+        >>> balance = await load_token_balance(token=dai_contract, address='0x1234...', block=12345678)
+        >>> print(f"Token: {balance.token}, Value: {balance.balance}, USD: {balance.usd_value}")
+    """
+    try:
+        balance = await token.balance_of_readable(address, block, sync=False)
+    except y.NonStandardERC20:
+        logger.warning("NonStandardERC20 exc for %s", token)
+        balance = _ZERO
+    token_address = token.address
+    if not balance:
+        return Balance(token=token_address, block=block)
+    price = await _get_price(token_address, block)
+    return Balance(
+        balance=round(balance, 18),
+        usd_value=_calc_value(balance, price),
+        token=token_address,
+        block=block,
+    )
+
+
+def _calc_value(balance, price) -> Decimal:
+    """
+    Calculate the USD value of a token balance based on its price.
+
+    Args:
+        balance: The token balance.
+        price: The token price in USD.
+
+    Returns:
+        The total USD value, rounded to 18 decimal places if possible.
+        If rounding is not possible due to high precision, returns the unrounded value.
+
+    Note:
+        Returns :class:`~decimal.Decimal(0)` if the price is None, handling cases where price data is unavailable.
+
+    Example:
+        >>> value = _calc_value(balance=1000, price=0.50)
+        >>> print(f"USD Value: {value}")
+    """
+    if price is None:
+        return _ZERO
+    # NOTE If balance * price returns a Decimal with precision < 18, rounding is both impossible and unnecessary.
+    value = Decimal(balance) * Decimal(price)
+    return round(value, 18)
+
+
+_builtin_round: Final = round
+
+
+def round(value: Decimal, digits: int) -> Decimal:
+    # For a Decimal with precision < 18, rounding is both impossible and unnecessary.
+    try:
+        return _builtin_round(value, digits)  # type: ignore [return-value]
+    except InvalidOperation:
+        return value
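
A note on the `round` override above: it appears intended to keep very large Decimal values usable when quantizing to 18 places would push the coefficient past the active decimal context's precision (28 significant digits by default), in which case the builtin round raises InvalidOperation. The short standalone sketch below reproduces that fallback with the stdlib decimal module; it assumes the default context, assumes eth_portfolio._decimal.Decimal rounds like decimal.Decimal, and uses a differently named helper to avoid shadowing the builtin.

from decimal import Decimal, InvalidOperation


def tolerant_round(value: Decimal, digits: int) -> Decimal:
    # Same fallback as the module-level `round` above: if quantizing to `digits`
    # places would exceed the context precision, return the value unrounded.
    try:
        return round(value, digits)
    except InvalidOperation:
        return value


small = Decimal("123.1234567890123456789012")       # rounds cleanly to 18 places
huge = Decimal("123456789012.345678901234567890")   # 12 integer digits + 18 decimals exceeds the 28-digit context

print(tolerant_round(small, 18))  # 123.123456789012345679
print(tolerant_round(huge, 18))   # returned unchanged; builtin round() would raise InvalidOperation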