eth-portfolio 0.5.7 (cp312-cp312-win32.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of eth-portfolio might be problematic.

Files changed (83)
  1. eth_portfolio/__init__.py +24 -0
  2. eth_portfolio/_argspec.cp312-win32.pyd +0 -0
  3. eth_portfolio/_argspec.py +43 -0
  4. eth_portfolio/_cache.py +119 -0
  5. eth_portfolio/_config.cp312-win32.pyd +0 -0
  6. eth_portfolio/_config.py +4 -0
  7. eth_portfolio/_db/__init__.py +0 -0
  8. eth_portfolio/_db/decorators.py +147 -0
  9. eth_portfolio/_db/entities.py +311 -0
  10. eth_portfolio/_db/utils.py +616 -0
  11. eth_portfolio/_decimal.py +154 -0
  12. eth_portfolio/_decorators.py +84 -0
  13. eth_portfolio/_exceptions.py +65 -0
  14. eth_portfolio/_ledgers/__init__.py +0 -0
  15. eth_portfolio/_ledgers/address.py +924 -0
  16. eth_portfolio/_ledgers/portfolio.py +328 -0
  17. eth_portfolio/_loaders/__init__.py +33 -0
  18. eth_portfolio/_loaders/_nonce.cp312-win32.pyd +0 -0
  19. eth_portfolio/_loaders/_nonce.py +193 -0
  20. eth_portfolio/_loaders/balances.cp312-win32.pyd +0 -0
  21. eth_portfolio/_loaders/balances.py +95 -0
  22. eth_portfolio/_loaders/token_transfer.py +215 -0
  23. eth_portfolio/_loaders/transaction.py +240 -0
  24. eth_portfolio/_loaders/utils.cp312-win32.pyd +0 -0
  25. eth_portfolio/_loaders/utils.py +67 -0
  26. eth_portfolio/_shitcoins.cp312-win32.pyd +0 -0
  27. eth_portfolio/_shitcoins.py +342 -0
  28. eth_portfolio/_stableish.cp312-win32.pyd +0 -0
  29. eth_portfolio/_stableish.py +42 -0
  30. eth_portfolio/_submodules.py +72 -0
  31. eth_portfolio/_utils.py +215 -0
  32. eth_portfolio/_ydb/__init__.py +0 -0
  33. eth_portfolio/_ydb/token_transfers.py +145 -0
  34. eth_portfolio/address.py +396 -0
  35. eth_portfolio/buckets.py +212 -0
  36. eth_portfolio/constants.cp312-win32.pyd +0 -0
  37. eth_portfolio/constants.py +87 -0
  38. eth_portfolio/portfolio.py +662 -0
  39. eth_portfolio/protocols/__init__.py +64 -0
  40. eth_portfolio/protocols/_base.py +107 -0
  41. eth_portfolio/protocols/convex.py +17 -0
  42. eth_portfolio/protocols/dsr.py +50 -0
  43. eth_portfolio/protocols/lending/README.md +6 -0
  44. eth_portfolio/protocols/lending/__init__.py +50 -0
  45. eth_portfolio/protocols/lending/_base.py +56 -0
  46. eth_portfolio/protocols/lending/compound.py +186 -0
  47. eth_portfolio/protocols/lending/liquity.py +108 -0
  48. eth_portfolio/protocols/lending/maker.py +110 -0
  49. eth_portfolio/protocols/lending/unit.py +44 -0
  50. eth_portfolio/protocols/liquity.py +17 -0
  51. eth_portfolio/py.typed +0 -0
  52. eth_portfolio/structs/__init__.py +43 -0
  53. eth_portfolio/structs/modified.py +69 -0
  54. eth_portfolio/structs/structs.py +626 -0
  55. eth_portfolio/typing/__init__.py +1418 -0
  56. eth_portfolio/typing/balance/single.py +176 -0
  57. eth_portfolio-0.5.7.dist-info/METADATA +26 -0
  58. eth_portfolio-0.5.7.dist-info/RECORD +83 -0
  59. eth_portfolio-0.5.7.dist-info/WHEEL +5 -0
  60. eth_portfolio-0.5.7.dist-info/entry_points.txt +2 -0
  61. eth_portfolio-0.5.7.dist-info/top_level.txt +3 -0
  62. eth_portfolio__mypyc.cp312-win32.pyd +0 -0
  63. eth_portfolio_scripts/__init__.py +17 -0
  64. eth_portfolio_scripts/_args.py +26 -0
  65. eth_portfolio_scripts/_logging.py +14 -0
  66. eth_portfolio_scripts/_portfolio.py +209 -0
  67. eth_portfolio_scripts/_utils.py +106 -0
  68. eth_portfolio_scripts/balances.cp312-win32.pyd +0 -0
  69. eth_portfolio_scripts/balances.py +56 -0
  70. eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
  71. eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
  72. eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
  73. eth_portfolio_scripts/docker/__init__.cp312-win32.pyd +0 -0
  74. eth_portfolio_scripts/docker/__init__.py +16 -0
  75. eth_portfolio_scripts/docker/check.cp312-win32.pyd +0 -0
  76. eth_portfolio_scripts/docker/check.py +66 -0
  77. eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
  78. eth_portfolio_scripts/docker/docker_compose.cp312-win32.pyd +0 -0
  79. eth_portfolio_scripts/docker/docker_compose.py +97 -0
  80. eth_portfolio_scripts/main.py +118 -0
  81. eth_portfolio_scripts/py.typed +1 -0
  82. eth_portfolio_scripts/victoria/__init__.py +72 -0
  83. eth_portfolio_scripts/victoria/types.py +38 -0
eth_portfolio/_ledgers/address.py
@@ -0,0 +1,924 @@
+ """
+ This module defines the :class:`~eth_portfolio.AddressLedgerBase`, :class:`~eth_portfolio.TransactionsList`,
+ :class:`~eth_portfolio.AddressTransactionsLedger`, :class:`~eth_portfolio.InternalTransfersList`,
+ :class:`~eth_portfolio.AddressInternalTransfersLedger`, :class:`~eth_portfolio.TokenTransfersList`,
+ and :class:`~eth_portfolio.AddressTokenTransfersLedger` classes. These classes manage and interact with ledger entries
+ such as transactions, internal transfers, and token transfers associated with Ethereum addresses within the `eth-portfolio` system.
+
+ These classes leverage the `a_sync` library to support both synchronous and asynchronous operations, allowing efficient data gathering
+ and processing without blocking, thus improving the overall responsiveness and performance of portfolio operations.
+ """
+
+ from abc import ABCMeta, abstractmethod
+ from asyncio import Lock, Queue, create_task, gather, sleep
+ from collections import defaultdict
+ from collections.abc import AsyncGenerator, AsyncIterator, Callable
+ from functools import partial
+ from http import HTTPStatus
+ from itertools import product
+ from logging import getLogger
+ from typing import TYPE_CHECKING, Final, Generic, NoReturn, TypeVar, Union
+
+ import a_sync
+ import dank_mids
+ import eth_retry
+ from a_sync.asyncio import sleep0 as yield_to_loop
+ from aiohttp import ClientResponseError
+ from brownie import chain
+ from dank_mids.eth import TraceFilterParams
+ from eth_typing import BlockNumber, ChecksumAddress
+ from evmspec import FilterTrace
+ from evmspec.structs.receipt import Status
+ from evmspec.structs.trace import call, reward
+ from faster_async_lru import alru_cache
+ from pandas import DataFrame  # type: ignore
+ from tqdm import tqdm
+ from y import ERC20, Network
+ from y._decorators import stuck_coro_debugger
+ from y.datatypes import Block
+ from y.utils.events import BATCH_SIZE
+
+ from eth_portfolio import _exceptions
+ from eth_portfolio._cache import cache_to_disk
+ from eth_portfolio._decorators import set_end_block_if_none
+ from eth_portfolio._loaders.transaction import get_nonce_at_block, load_transaction
+ from eth_portfolio._utils import PandableList, _AiterMixin, get_buffered_chain_height
+ from eth_portfolio._ydb.token_transfers import TokenTransfers
+ from eth_portfolio.structs import InternalTransfer, TokenTransfer, Transaction
+
+ if TYPE_CHECKING:
+     from eth_portfolio.address import PortfolioAddress
+
+ logger = getLogger(__name__)
+
+
+ T = TypeVar("T")
+
+ _LedgerEntryList = TypeVar(
+     "_LedgerEntryList", "TransactionsList", "InternalTransfersList", "TokenTransfersList"
+ )
+ PandableLedgerEntryList = Union["TransactionsList", "InternalTransfersList", "TokenTransfersList"]
+
+
+ class AddressLedgerBase(
+     a_sync.ASyncGenericBase, _AiterMixin[T], Generic[_LedgerEntryList, T], metaclass=ABCMeta
+ ):
+     """
+     Abstract base class for address ledgers in the eth-portfolio system.
+     """
+
+     __slots__ = (
+         "address",
+         "asynchronous",
+         "cached_from",
+         "cached_thru",
+         "load_prices",
+         "objects",
+         "portfolio_address",
+         "_lock",
+     )
+
+     def __init__(self, portfolio_address: "PortfolioAddress") -> None:
+         """
+         Initializes the AddressLedgerBase instance.
+
+         Args:
+             portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
+         """
+
+         # TODO replace the following line with an abc implementation.
+         # assert isinstance(portfolio_address, PortfolioAddress), f"address must be a PortfolioAddress. try passing in PortfolioAddress({portfolio_address}) instead."
+
+         super().__init__()
+
+         self.portfolio_address = portfolio_address
+         """
+         The portfolio address this ledger belongs to.
+         """
+
+         self.address: Final = self.portfolio_address.address
+         """
+         The Ethereum address being managed.
+         """
+
+         self.asynchronous: Final = self.portfolio_address.asynchronous
+         """
+         Flag indicating if the operations are asynchronous.
+         """
+
+         self.load_prices: Final = self.portfolio_address.load_prices
+         """
+         Indicates if price loading is enabled.
+         """
+
+         self.objects: Final[_LedgerEntryList] = self._list_type()
+         """
+         _LedgerEntryList: List of ledger entries.
+         """
+
+         # NOTE: The following two properties will both be ints once the cache has contents
+         self.cached_from: int = None  # type: ignore
+         """
+         The block from which all entries for this ledger have been loaded into memory.
+         """
+
+         self.cached_thru: int = None  # type: ignore
+         """
+         The block through which all entries for this ledger have been loaded into memory.
+         """
+
+         self._lock: Final = Lock()
+         """
+         Lock: Lock for synchronizing access to ledger entries.
+         """
+
+     def __hash__(self) -> int:
+         """
+         Returns the hash of the address.
+
+         Returns:
+             The hash value.
+         """
+         return hash(self.address)
+
+     def __repr__(self) -> str:
+         return f"<{type(self).__name__} for {self.address} at {hex(id(self))}>"
+
+     @property
+     @abstractmethod
+     def _list_type(self) -> type[_LedgerEntryList]:
+         """
+         Type of list used to store ledger entries.
+         """
+         ...
+
+     @property
+     def _start_block(self) -> int:
+         """
+         Returns the starting block for the portfolio address.
+
+         Returns:
+             The starting block number.
+         """
+         return self.portfolio_address._start_block
+
+     async def _get_and_yield(
+         self, start_block: Block, end_block: Block, mem_cache: bool
+     ) -> AsyncGenerator[T, None]:
+         """
+         Yields ledger entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Yields:
+             AsyncGenerator[T, None]: An async generator of ledger entries.
+         """
+         num_yielded = 0
+
+         async def unblock_loop() -> None:
+             """
+             Let the event loop run at least once for every 100
+             objects yielded so it doesn't get too congested.
+             """
+             nonlocal num_yielded
+             num_yielded += 1
+             if num_yielded % 500 == 0:
+                 await yield_to_loop()
+
+         if not mem_cache:
+             async for ledger_entry in self._get_new_objects(start_block, end_block, False):
+                 yield ledger_entry
+                 await unblock_loop()
+             return
+
+         if self.objects and end_block and self.objects[-1].block_number > end_block:
+             for ledger_entry in self.objects:
+                 block = ledger_entry.block_number
+                 if block < start_block:
+                     continue
+                 elif block > end_block:
+                     return
+                 yield ledger_entry
+                 await unblock_loop()
+
+         yielded = set()
+         for ledger_entry in self.objects:
+             block = ledger_entry.block_number
+             if block < start_block:
+                 continue
+             elif end_block and block > end_block:
+                 break
+             yield ledger_entry
+             yielded.add(ledger_entry)
+             await unblock_loop()
+         async for ledger_entry in self._get_new_objects(start_block, end_block, True):  # type: ignore [assignment, misc]
+             if ledger_entry not in yielded:
+                 yield ledger_entry
+                 yielded.add(ledger_entry)
+                 await unblock_loop()
+         for ledger_entry in self.objects:
+             block = ledger_entry.block_number
+             if block < start_block:
+                 continue
+             elif end_block and block > end_block:
+                 break
+             if ledger_entry not in yielded:
+                 yield ledger_entry
+                 yielded.add(ledger_entry)
+                 await unblock_loop()
+
+     @set_end_block_if_none
+     @stuck_coro_debugger
+     async def get(self, start_block: Block, end_block: Block) -> _LedgerEntryList:
+         """
+         Retrieves ledger entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Returns:
+             _LedgerEntryList: The list of ledger entries.
+
+         Examples:
+             >>> entries = await ledger.get(12000000, 12345678)
+         """
+         return self._list_type([ledger_entry async for ledger_entry in self[start_block:end_block]])
+
+     @stuck_coro_debugger
+     async def new(self) -> _LedgerEntryList:
+         """
+         Retrieves new ledger entries since the last cached block.
+
+         Returns:
+             _LedgerEntryList: The list of new ledger entries.
+
+         Examples:
+             >>> new_entries = await ledger.new()
+         """
+         start_block = 0 if self.cached_thru is None else self.cached_thru + 1
+         end_block = await get_buffered_chain_height()
+         return self[start_block, end_block]  # type: ignore [index, return-value]
+
+     async def sent(
+         self, start_block: Block | None = None, end_block: Block | None = None
+     ) -> AsyncIterator[T]:
+         address = self.portfolio_address.address
+         async for obj in self[start_block:end_block]:
+             if obj.from_address == address:
+                 yield obj
+
+     async def received(
+         self, start_block: Block | None = None, end_block: Block | None = None
+     ) -> AsyncIterator[T]:
+         address = self.portfolio_address.address
+         async for obj in self[start_block:end_block]:
+             if obj.from_address != address:
+                 yield obj
+
+     @stuck_coro_debugger
+     @set_end_block_if_none
+     async def _get_new_objects(
+         self, start_block: Block, end_block: Block, mem_cache: bool
+     ) -> AsyncIterator[T]:
+         """
+         Retrieves new ledger entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Yields:
+             AsyncIterator[T]: An async iterator of new ledger entries.
+         """
+         async with self._lock:
+             async for ledger_entry in self._load_new_objects(start_block, end_block, mem_cache):
+                 yield ledger_entry
+
+     @abstractmethod
+     async def _load_new_objects(
+         self, start_block: Block, end_block: Block, mem_cache: bool
+     ) -> AsyncIterator[T]:
+         """
+         Abstract method to load new ledger entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Yields:
+             AsyncIterator[T]: An async iterator of new ledger entries.
+         """
+         yield  # type: ignore [misc]
+
+     def _check_blocks_against_cache(
+         self, start_block: Block, end_block: Block
+     ) -> tuple[Block, Block]:
+         """
+         Checks the specified block range against the cached block range.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Returns:
+             The adjusted block range.
+
+         Raises:
+             ValueError: If the start block is after the end block.
+             _exceptions.BlockRangeIsCached: If the block range is already cached.
+             _exceptions.BlockRangeOutOfBounds: If the block range is out of bounds.
+         """
+         if start_block > end_block:
+             raise ValueError(f"Start block {start_block} is after end block {end_block}")
+
+         # There is no cache
+         elif self.cached_from is None or self.cached_thru is None:
+             return start_block, end_block
+
+         # Range is cached
+         elif start_block >= self.cached_from and end_block <= self.cached_thru:
+             raise _exceptions.BlockRangeIsCached()
+
+         # Beginning of range is cached
+         elif (
+             start_block >= self.cached_from
+             and start_block < self.cached_thru
+             and end_block > self.cached_thru
+         ):
+             return self.cached_thru + 1, end_block
+
+         # End of range is cached
+         elif (
+             start_block < self.cached_from
+             and end_block >= self.cached_from
+             and end_block < self.cached_thru
+         ):
+             return start_block, self.cached_from - 1
+
+         # Beginning and end both outside bounds of cache to high side
+         elif start_block > self.cached_thru:
+             return self.cached_thru + 1, end_block
+
+         # Beginning and end both outside bounds of cache to low side
+         elif end_block < self.cached_from:
+             return start_block, self.cached_from - 1
+
+         # Beginning and end both outside bounds of cache, split
+         elif start_block < self.cached_from and end_block > self.cached_thru:
+             raise _exceptions.BlockRangeOutOfBounds(start_block, end_block, self)
+
+         raise NotImplementedError(
+             f"This is a work in progress and we still need code for this specific case. Feel free to create an issue on our github if you need this.\n\nstart_block: {start_block} end_block: {end_block} cached_from: {self.cached_from} cached_thru: {self.cached_thru}"
+         )
+
+
+ class TransactionsList(PandableList[Transaction]):
+     """
+     A list subclass for transactions that can convert to a :class:`DataFrame`.
+     """
+
+     def _df(self) -> DataFrame:
+         """
+         Converts the list of transactions to a DataFrame.
+
+         Returns:
+             DataFrame: The transactions as a DataFrame.
+         """
+         df = DataFrame(self)
+         if len(df) > 0:
+             df.chainId = df.chainId.apply(int)
+             df.blockNumber = df.blockNumber.apply(int)
+             df.transactionIndex = df.transactionIndex.apply(int)
+             df.nonce = df.nonce.apply(int)
+             df.gas = df.gas.apply(int)
+             df.gasPrice = df.gasPrice.apply(int)
+         return df
+
+
+ Nonce = int
+
+
+ class AddressTransactionsLedger(AddressLedgerBase[TransactionsList, Transaction]):
+     """
+     A ledger for managing transaction entries.
+     """
+
+     _list_type = TransactionsList
+     __slots__ = ("cached_thru_nonce", "_queue", "_ready", "_num_workers", "_workers")
+
+     def __init__(self, portfolio_address: "PortfolioAddress", num_workers: int = 1000):
+         """
+         Initializes the AddressTransactionsLedger instance.
+
+         Args:
+             portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
+         """
+         super().__init__(portfolio_address)
+         self.cached_thru_nonce = -1
+         """
+         The nonce through which all transactions have been loaded into memory.
+         """
+         self._queue = Queue()
+         self._ready = Queue()
+         self._num_workers = num_workers
+         self._workers = []
+
+     def __del__(self) -> None:
+         self.__stop_workers()
+
+     @stuck_coro_debugger
+     @set_end_block_if_none
+     async def _load_new_objects(self, _: Block, end_block: Block, mem_cache: bool) -> AsyncIterator[Transaction]:  # type: ignore [override]
+         """
+         Loads new transaction entries between the specified blocks.
+
+         Args:
+             _: The starting block number (unused).
+             end_block: The ending block number.
+
+         Yields:
+             AsyncIterator[Transaction]: An async iterator of transaction entries.
+         """
+         if self.cached_thru and end_block < self.cached_thru:
+             return
+         if not mem_cache:
+             logger.warning(
+                 f"{type(self).__name__}._load_new_objects mem_cache arg is not yet implemented"
+             )
+         address = self.address
+         end_block_nonce: int = await get_nonce_at_block(address, end_block)
+         if nonces := tuple(range(self.cached_thru_nonce + 1, end_block_nonce + 1)):
+             for i, nonce in enumerate(nonces):
+                 self._queue.put_nowait(nonce)
+
+                 # Keep the event loop relatively unblocked
+                 # and let the rpc start doing work asap
+                 if i % 1000:
+                     await yield_to_loop()
+
+             len_nonces = len(nonces)
+             del nonces
+
+             self._ensure_workers(min(len_nonces, self._num_workers))
+
+             transactions = []
+             transaction: Transaction | None
+             for _ in tqdm(range(len_nonces), desc=f"Transactions {address}"):
+                 nonce, transaction = await self._ready.get()
+                 if transaction:
+                     if isinstance(transaction, Exception):
+                         raise transaction
+                     transactions.append(transaction)
+                     yield transaction
+                 elif nonce == 0 and self.cached_thru_nonce == -1:
+                     # Gnosis safes
+                     self.cached_thru_nonce = 0
+                 else:
+                     # NOTE Are we sure this is the correct way to handle this scenario? Are we sure it will ever even occur with the new gnosis handling?
+                     logger.warning("No transaction with nonce %s for %s", nonce, address)
+
+             self.__stop_workers()
+
+             if transactions:
+                 self.objects.extend(transactions)
+             if self.objects:
+                 self.objects.sort(key=lambda t: t.nonce)
+                 self.cached_thru_nonce = self.objects[-1].nonce
+
+         if self.cached_from is None:
+             self.cached_from = 0
+         if self.cached_thru is None or end_block > self.cached_thru:
+             self.cached_thru = end_block
+
+     def _ensure_workers(self, num_workers: int) -> None:
+         len_workers = len(self._workers)
+         if len_workers < num_workers:
+             worker_fn = self.__worker_fn
+             address = self.address
+             load_prices = self.load_prices
+             queue_get = stuck_coro_debugger(self._queue.get)
+             put_ready = self._ready.put_nowait
+
+             self._workers.extend(
+                 create_task(
+                     coro=worker_fn(address, load_prices, queue_get, put_ready),
+                     name=f"AddressTransactionsLedger worker {i} for {address}",
+                 )
+                 for i in range(num_workers - len_workers)
+             )
+
+     async def __worker_fn(
+         self,
+         address: ChecksumAddress,
+         load_prices: bool,
+         queue_get: Callable[[], Nonce],
+         put_ready: Callable[[Nonce, Transaction | None], None],
+     ) -> NoReturn:
+         try:
+             while True:
+                 nonce = await queue_get()
+                 try:
+                     put_ready(await load_transaction(address, nonce, load_prices))
+                 except Exception as e:
+                     put_ready((nonce, e))
+         except Exception as e:
+             logger.error("%s in %s __worker_coro", type(e), self)
+             logger.exception(e)
+             raise
+
+     def __stop_workers(self) -> None:
+         logger.debug("stopping workers for %s", self)
+         workers = self._workers
+         pop_next = workers.pop
+         for _ in range(len(workers)):
+             pop_next().cancel()
+
+
+ class InternalTransfersList(PandableList[InternalTransfer]):
+     """
+     A list subclass for internal transfer entries that can convert to a :class:`DataFrame`.
+     """
+
+
+ @a_sync.Semaphore(128, __name__ + ".trace_filter")
+ @stuck_coro_debugger
+ @eth_retry.auto_retry
+ async def trace_filter(
+     from_block: BlockNumber,
+     to_block: BlockNumber,
+     params: TraceFilterParams,
+ ) -> list[FilterTrace]:
+     return await __trace_filter(from_block, to_block, params)
+
+
+ async def __trace_filter(
+     from_block: BlockNumber,
+     to_block: BlockNumber,
+     params: TraceFilterParams,
+ ) -> list[FilterTrace]:
+     try:
+         return await dank_mids.eth.trace_filter(
+             {"fromBlock": from_block, "toBlock": to_block, **params}
+         )
+     except ClientResponseError as e:
+         if e.status != HTTPStatus.SERVICE_UNAVAILABLE or to_block == from_block:
+             raise
+     except TypeError as e:
+         # This is some intermittent error I need to debug in dank_mids, I think it occurs when we get rate limited
+         if str(e) != "a bytes-like object is required, not 'NoneType'":
+             raise
+         await sleep(0.5)
+         # remove this logger when I know there are no looping issues
+         logger.info("call failed, trying again")
+
+     range_size = to_block - from_block + 1
+     chunk_size = range_size // 2
+     halfway = from_block + chunk_size
+
+     results = await gather(
+         __trace_filter(from_block, BlockNumber(halfway), params),
+         __trace_filter(BlockNumber(halfway + 1), to_block, params),
+     )
+     return results[0] + results[1]
+
+
+ @alru_cache(maxsize=None)
+ @eth_retry.auto_retry(min_sleep_time=1, max_sleep_time=3, max_retries=20, suppress_logs=1)
+ async def get_transaction_status(txhash: str) -> Status:
+     """
+     Retrieves the status for a transaction.
+
+     This function is cached to disk to reduce resource usage.
+
+     Args:
+         txhash: The hash of the transaction.
+
+     Returns:
+         The status of the transaction.
+     """
+     return await dank_mids.eth.get_transaction_status(txhash)
+
+
+ _trace_semaphores = defaultdict(lambda: a_sync.Semaphore(4, __name__ + ".trace_semaphore"))
+
+
+ @cache_to_disk
+ @eth_retry.auto_retry
+ async def get_traces(
+     from_block: BlockNumber,
+     to_block: BlockNumber,
+     filter_params: TraceFilterParams,
+ ) -> list[FilterTrace]:
+     """
+     Retrieves traces from the web3 provider using the given parameters.
+
+     This function is cached to disk to reduce resource usage.
+
+     Args:
+         filter_params: The parameters for the trace filter.
+
+     Returns:
+         The list of traces.
+     """
+     if chain.id == Network.Polygon:
+         logger.warning(
+             "polygon doesnt support trace_filter method, must develop alternate solution"
+         )
+         return []
+     semaphore_key = (
+         tuple(filter_params.get("toAddress", ("",))),
+         tuple(filter_params.get("fromAddress", ("",))),
+     )
+     async with _trace_semaphores[semaphore_key]:
+         traces = await trace_filter(from_block, to_block, filter_params)
+     return await _check_traces(traces) if traces else []
+
+
+ @stuck_coro_debugger
+ @eth_retry.auto_retry
+ async def _check_traces(traces: list[FilterTrace]) -> list[FilterTrace]:
+     good_traces = []
+     append = good_traces.append
+
+     check_status_tasks = a_sync.TaskMapping(get_transaction_status)
+
+     for i, trace in enumerate(traces):
+         # Make sure we don't block up the event loop
+         if i % 500:
+             await yield_to_loop()
+
+         if "error" in trace:
+             continue
+
+         # NOTE: Not sure why these appear, but I've yet to come across an internal transfer
+         # that actually transmitted value to the singleton even though they appear to.
+         if (
+             isinstance(trace, call.Trace)
+             and trace.action.to == "0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552"
+         ):  # Gnosis Safe Singleton 1.3.0
+             continue
+
+         if not isinstance(trace, reward.Trace):
+             # NOTE: We don't need to confirm block rewards came from a successful transaction, because they don't come from a transaction
+             check_status_tasks[trace.transactionHash]
+
+         append(trace)
+
+     # NOTE: We don't need to confirm block rewards came from a successful transaction, because they don't come from a transaction
+     return [
+         trace
+         for trace in good_traces
+         if isinstance(trace, reward.Trace)
+         or await check_status_tasks[trace.transactionHash] == Status.success
+     ]
+
+
+ BlockRange = tuple[Block, Block]
+
+
+ def _get_block_ranges(start_block: Block, end_block: Block) -> list[BlockRange]:
+     return [(i, i + BATCH_SIZE - 1) for i in range(start_block, end_block, BATCH_SIZE)]
+
+
+ class AddressInternalTransfersLedger(AddressLedgerBase[InternalTransfersList, InternalTransfer]):
+     """
+     A ledger for managing internal transfer entries.
+     """
+
+     _list_type = InternalTransfersList
+
+     @stuck_coro_debugger
+     @set_end_block_if_none
+     async def _load_new_objects(
+         self, start_block: Block, end_block: Block, mem_cache: bool
+     ) -> AsyncIterator[InternalTransfer]:
+         """
+         Loads new internal transfer entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Yields:
+             AsyncIterator[InternalTransfer]: An async iterator of internal transfer entries.
+         """
+         if start_block == 0:
+             start_block = 1
+
+         if mem_cache:
+             try:
+                 start_block, end_block = self._check_blocks_against_cache(start_block, end_block)
+             except _exceptions.BlockRangeIsCached:
+                 return
+             except _exceptions.BlockRangeOutOfBounds as e:
+                 await e.load_remaining()
+                 return
+
+         # TODO: figure out where this float comes from and raise a TypeError there
+         if isinstance(start_block, float) and int(start_block) == start_block:
+             start_block = int(start_block)
+         if isinstance(end_block, float) and int(end_block) == end_block:
+             end_block = int(end_block)
+
+         address = self.address
+         if start_block == end_block:
+             trace_filter_coros = [
+                 get_traces(start_block, end_block, {"toAddress": [address]}),
+                 get_traces(start_block, end_block, {"fromAddress": [address]}),
+             ]
+         else:
+             block_ranges = _get_block_ranges(start_block, end_block)
+             addr_filters = {"toAddress": [address]}, {"fromAddress": [address]}
+             trace_filter_coros = [
+                 get_traces(start, end, addr_filter)
+                 for (start, end), addr_filter in product(block_ranges, addr_filters)
+             ]
+
+         # NOTE: We only want tqdm progress bar when there is work to do
+         if len(trace_filter_coros) < 10:
+             generator_function = a_sync.as_completed
+         else:
+             generator_function = partial(  # type: ignore [assignment]
+                 a_sync.as_completed, tqdm=True, desc=f"Trace Filters {address}"
+             )
+
+         load = InternalTransfer.from_trace
+
+         if mem_cache:
+             internal_transfers = []
+             append_transfer = internal_transfers.append
+
+         done = 0
+         if self.load_prices:
+             traces = []
+             async for chunk in generator_function(trace_filter_coros, aiter=True):
+                 traces.extend(chunk)
+
+             if traces:
+                 tasks = []
+                 while traces:
+                     tasks.extend(
+                         create_task(load(trace, load_prices=True)) for trace in traces[:5000]
+                     )
+                     traces = traces[5000:]
+                     # let the tasks start sending calls to your node now
+                     # without waiting for all tasks to be created
+                     await yield_to_loop()
+
+                 async for internal_transfer in a_sync.as_completed(
+                     tasks, aiter=True, tqdm=True, desc=f"Internal Transfers {address}"
+                 ):
+                     if internal_transfer is not None:
+                         if mem_cache:
+                             append_transfer(internal_transfer)
+                         yield internal_transfer
+
+                     done += 1
+                     if done % 1000 == 0:
+                         await yield_to_loop()
+
+         else:
+             async for chunk in generator_function(trace_filter_coros, aiter=True):
+                 for trace in chunk:
+                     internal_transfer = await load(trace, load_prices=False)
+                     if internal_transfer is not None:
+                         if mem_cache:
+                             append_transfer(internal_transfer)
+                         yield internal_transfer
+
+                     done += 1
+                     if done % 1000 == 0:
+                         await yield_to_loop()
+
+         if mem_cache and internal_transfers:
+             self.objects.extend(internal_transfers)
+             self.objects.sort(key=lambda t: (t.block_number, t.transaction_index))
+
+         if self.cached_from is None or start_block < self.cached_from:
+             self.cached_from = start_block
+         if self.cached_thru is None or end_block > self.cached_thru:
+             self.cached_thru = end_block
+
+
+ _yield_tokens_semaphore = a_sync.Semaphore(
+     10, name="eth_portfolio._ledgers.address._yield_tokens_semaphore"
+ )
+
+
+ class TokenTransfersList(PandableList[TokenTransfer]):
+     """
+     A list subclass for token transfer entries that can convert to a :class:`DataFrame`.
+     """
+
+
+ class AddressTokenTransfersLedger(AddressLedgerBase[TokenTransfersList, TokenTransfer]):
+     """
+     A ledger for managing token transfer entries.
+     """
+
+     _list_type = TokenTransfersList
+     __slots__ = ("_transfers",)
+
+     def __init__(self, portfolio_address: "PortfolioAddress"):
+         """
+         Initializes the AddressTokenTransfersLedger instance.
+
+         Args:
+             portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
+         """
+         super().__init__(portfolio_address)
+         self._transfers = TokenTransfers(
+             self.address, self.portfolio_address._start_block, load_prices=self.load_prices
+         )
+         """
+         TokenTransfers: Instance for handling token transfer operations.
+         """
+
+     @stuck_coro_debugger
+     async def list_tokens_at_block(self, block: int | None = None) -> list[ERC20]:
+         """
+         Lists the tokens held at a specific block.
+
+         Args:
+             block (int | None): The block number. Defaults to None.
+
+         Returns:
+             List[ERC20]: The list of ERC20 tokens.
+
+         Examples:
+             >>> tokens = await ledger.list_tokens_at_block(12345678)
+         """
+         return [token async for token in self._yield_tokens_at_block(block)]
+
+     async def _yield_tokens_at_block(self, block: int | None = None) -> AsyncIterator[ERC20]:
+         """
+         Yields the tokens held at a specific block.
+
+         Args:
+             block (int | None): The block number. Defaults to None.
+
+         Yields:
+             AsyncIterator[ERC20]: An async iterator of ERC20 tokens.
+         """
+         async with _yield_tokens_semaphore:
+             yielded = set()
+             async for transfer in self[:block]:
+                 address = transfer.token_address
+                 if address not in yielded:
+                     yielded.add(address)
+                     yield ERC20(address, asynchronous=self.asynchronous)
+
+     @stuck_coro_debugger
+     @set_end_block_if_none
+     async def _load_new_objects(self, start_block: Block, end_block: Block, mem_cache: bool) -> AsyncIterator[TokenTransfer]:  # type: ignore [override]
+         """
+         Loads new token transfer entries between the specified blocks.
+
+         Args:
+             start_block: The starting block number.
+             end_block: The ending block number.
+
+         Yields:
+             AsyncIterator[TokenTransfer]: An async iterator of token transfer entries.
+         """
+         if mem_cache:
+             try:
+                 start_block, end_block = self._check_blocks_against_cache(start_block, end_block)
+             except _exceptions.BlockRangeIsCached:
+                 return
+             except _exceptions.BlockRangeOutOfBounds as e:
+                 await e.load_remaining()
+                 return
+
+         if tasks := [
+             task
+             async for task in self._transfers.yield_thru_block(end_block)
+             if start_block <= task.block  # type: ignore [attr-defined]
+         ]:
+             token_transfers = []
+             append_token_transfer = token_transfers.append
+             done = 0
+             async for token_transfer in a_sync.as_completed(
+                 tasks, aiter=True, tqdm=True, desc=f"Token Transfers {self.address}"
+             ):
+                 if token_transfer:
+                     if mem_cache:
+                         append_token_transfer(token_transfer)
+                     yield token_transfer
+
+                 # Don't let the event loop get congested
+                 done += 1
+                 if done % 100 == 0:
+                     await yield_to_loop()
+
+             if mem_cache and token_transfers:
+                 self.objects.extend(token_transfers)
+                 self.objects.sort(key=lambda t: (t.block_number, t.transaction_index, t.log_index))
+
+         if self.cached_from is None or start_block < self.cached_from:
+             self.cached_from = start_block
+         if self.cached_thru is None or end_block > self.cached_thru:
+             self.cached_thru = end_block
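
For orientation, here is a minimal usage sketch of the ledger API shown in the diff above. It only uses names visible in this file (AddressTransactionsLedger, get, sent, and slice-based async iteration, which get() itself relies on); the PortfolioAddress construction and RPC configuration are assumptions and are not taken from this release, so treat the snippet as illustrative rather than as documentation of the published API.

    # Hypothetical sketch -- assumes an existing PortfolioAddress instance and a
    # configured brownie/dank_mids connection; neither is shown in this diff.
    import asyncio

    from eth_portfolio._ledgers.address import AddressTransactionsLedger
    from eth_portfolio.address import PortfolioAddress  # import path per the file list above


    async def dump_recent_activity(portfolio_address: "PortfolioAddress") -> None:
        ledger = AddressTransactionsLedger(portfolio_address)

        # Materialize a block range, as in the docstring example in the diff.
        entries = await ledger.get(12_000_000, 12_345_678)
        print(len(entries))

        # Or stream entries lazily through the same slice machinery get() uses internally.
        async for tx in ledger[12_000_000:12_345_678]:
            print(tx.nonce)

        # Outgoing entries only (received() is the mirror image).
        async for tx in ledger.sent(12_000_000, 12_345_678):
            print(tx.from_address)

Per the module docstring, the same methods are also usable synchronously via a_sync when the owning PortfolioAddress is created in synchronous mode; which mode applies is driven by the asynchronous flag captured in AddressLedgerBase.__init__.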