eth-portfolio 0.5.8__cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of eth-portfolio might be problematic.

Files changed (83)
  1. eth_portfolio/__init__.py +24 -0
  2. eth_portfolio/_argspec.cp310-win_amd64.pyd +0 -0
  3. eth_portfolio/_argspec.py +43 -0
  4. eth_portfolio/_cache.py +119 -0
  5. eth_portfolio/_config.cp310-win_amd64.pyd +0 -0
  6. eth_portfolio/_config.py +4 -0
  7. eth_portfolio/_db/__init__.py +0 -0
  8. eth_portfolio/_db/decorators.py +147 -0
  9. eth_portfolio/_db/entities.py +311 -0
  10. eth_portfolio/_db/utils.py +619 -0
  11. eth_portfolio/_decimal.py +154 -0
  12. eth_portfolio/_decorators.py +84 -0
  13. eth_portfolio/_exceptions.py +65 -0
  14. eth_portfolio/_ledgers/__init__.py +0 -0
  15. eth_portfolio/_ledgers/address.py +917 -0
  16. eth_portfolio/_ledgers/portfolio.py +328 -0
  17. eth_portfolio/_loaders/__init__.py +33 -0
  18. eth_portfolio/_loaders/_nonce.cp310-win_amd64.pyd +0 -0
  19. eth_portfolio/_loaders/_nonce.py +193 -0
  20. eth_portfolio/_loaders/balances.cp310-win_amd64.pyd +0 -0
  21. eth_portfolio/_loaders/balances.py +95 -0
  22. eth_portfolio/_loaders/token_transfer.py +215 -0
  23. eth_portfolio/_loaders/transaction.py +240 -0
  24. eth_portfolio/_loaders/utils.cp310-win_amd64.pyd +0 -0
  25. eth_portfolio/_loaders/utils.py +67 -0
  26. eth_portfolio/_shitcoins.cp310-win_amd64.pyd +0 -0
  27. eth_portfolio/_shitcoins.py +342 -0
  28. eth_portfolio/_stableish.cp310-win_amd64.pyd +0 -0
  29. eth_portfolio/_stableish.py +42 -0
  30. eth_portfolio/_submodules.py +72 -0
  31. eth_portfolio/_utils.py +229 -0
  32. eth_portfolio/_ydb/__init__.py +0 -0
  33. eth_portfolio/_ydb/token_transfers.py +144 -0
  34. eth_portfolio/address.py +396 -0
  35. eth_portfolio/buckets.py +212 -0
  36. eth_portfolio/constants.cp310-win_amd64.pyd +0 -0
  37. eth_portfolio/constants.py +87 -0
  38. eth_portfolio/portfolio.py +669 -0
  39. eth_portfolio/protocols/__init__.py +64 -0
  40. eth_portfolio/protocols/_base.py +107 -0
  41. eth_portfolio/protocols/convex.py +17 -0
  42. eth_portfolio/protocols/dsr.py +50 -0
  43. eth_portfolio/protocols/lending/README.md +6 -0
  44. eth_portfolio/protocols/lending/__init__.py +50 -0
  45. eth_portfolio/protocols/lending/_base.py +56 -0
  46. eth_portfolio/protocols/lending/compound.py +186 -0
  47. eth_portfolio/protocols/lending/liquity.py +108 -0
  48. eth_portfolio/protocols/lending/maker.py +110 -0
  49. eth_portfolio/protocols/lending/unit.py +44 -0
  50. eth_portfolio/protocols/liquity.py +17 -0
  51. eth_portfolio/py.typed +0 -0
  52. eth_portfolio/structs/__init__.py +43 -0
  53. eth_portfolio/structs/modified.py +69 -0
  54. eth_portfolio/structs/structs.py +628 -0
  55. eth_portfolio/typing/__init__.py +1418 -0
  56. eth_portfolio/typing/balance/single.py +176 -0
  57. eth_portfolio-0.5.8.dist-info/METADATA +28 -0
  58. eth_portfolio-0.5.8.dist-info/RECORD +83 -0
  59. eth_portfolio-0.5.8.dist-info/WHEEL +5 -0
  60. eth_portfolio-0.5.8.dist-info/entry_points.txt +2 -0
  61. eth_portfolio-0.5.8.dist-info/top_level.txt +3 -0
  62. eth_portfolio__mypyc.cp310-win_amd64.pyd +0 -0
  63. eth_portfolio_scripts/__init__.py +17 -0
  64. eth_portfolio_scripts/_args.py +26 -0
  65. eth_portfolio_scripts/_logging.py +14 -0
  66. eth_portfolio_scripts/_portfolio.py +209 -0
  67. eth_portfolio_scripts/_utils.py +106 -0
  68. eth_portfolio_scripts/balances.cp310-win_amd64.pyd +0 -0
  69. eth_portfolio_scripts/balances.py +56 -0
  70. eth_portfolio_scripts/docker/.grafana/dashboards/Portfolio/Balances.json +1962 -0
  71. eth_portfolio_scripts/docker/.grafana/dashboards/dashboards.yaml +10 -0
  72. eth_portfolio_scripts/docker/.grafana/datasources/datasources.yml +11 -0
  73. eth_portfolio_scripts/docker/__init__.cp310-win_amd64.pyd +0 -0
  74. eth_portfolio_scripts/docker/__init__.py +16 -0
  75. eth_portfolio_scripts/docker/check.cp310-win_amd64.pyd +0 -0
  76. eth_portfolio_scripts/docker/check.py +66 -0
  77. eth_portfolio_scripts/docker/docker-compose.yaml +61 -0
  78. eth_portfolio_scripts/docker/docker_compose.cp310-win_amd64.pyd +0 -0
  79. eth_portfolio_scripts/docker/docker_compose.py +97 -0
  80. eth_portfolio_scripts/main.py +118 -0
  81. eth_portfolio_scripts/py.typed +1 -0
  82. eth_portfolio_scripts/victoria/__init__.py +72 -0
  83. eth_portfolio_scripts/victoria/types.py +38 -0
@@ -0,0 +1,917 @@
1
+ """
2
+ This module defines the :class:`~eth_portfolio.AddressLedgerBase`, :class:`~eth_portfolio.TransactionsList`,
3
+ :class:`~eth_portfolio.AddressTransactionsLedger`, :class:`~eth_portfolio.InternalTransfersList`,
4
+ :class:`~eth_portfolio.AddressInternalTransfersLedger`, :class:`~eth_portfolio.TokenTransfersList`,
5
+ and :class:`~eth_portfolio.AddressTokenTransfersLedger` classes. These classes manage and interact with ledger entries
6
+ such as transactions, internal transfers, and token transfers associated with Ethereum addresses within the `eth-portfolio` system.
7
+
8
+ These classes leverage the `a_sync` library to support both synchronous and asynchronous operations, allowing efficient data gathering
9
+ and processing without blocking, thus improving the overall responsiveness and performance of portfolio operations.
10
+ """
11
+
12
+ from abc import ABCMeta, abstractmethod
13
+ from asyncio import Lock, Queue, create_task, gather, sleep
14
+ from collections import defaultdict
15
+ from collections.abc import AsyncGenerator, AsyncIterator, Callable
16
+ from functools import partial
17
+ from http import HTTPStatus
18
+ from itertools import product
19
+ from logging import getLogger
20
+ from typing import TYPE_CHECKING, Final, Generic, NoReturn, TypeVar, Union
21
+
22
+ import a_sync
23
+ import dank_mids
24
+ import eth_retry
25
+ from a_sync.asyncio import sleep0 as yield_to_loop
26
+ from aiohttp import ClientResponseError
27
+ from brownie import chain
28
+ from dank_mids.eth import TraceFilterParams
29
+ from eth_typing import BlockNumber, ChecksumAddress
30
+ from evmspec import FilterTrace
31
+ from evmspec.structs.receipt import Status
32
+ from evmspec.structs.trace import call, reward
33
+ from faster_async_lru import alru_cache
34
+ from pandas import DataFrame
35
+ from tqdm import tqdm
36
+ from y import ERC20, Network
37
+ from y._decorators import stuck_coro_debugger
38
+ from y.datatypes import Block
39
+ from y.utils.events import BATCH_SIZE
40
+
41
+ from eth_portfolio import _exceptions
42
+ from eth_portfolio._cache import cache_to_disk
43
+ from eth_portfolio._decorators import set_end_block_if_none
44
+ from eth_portfolio._loaders.transaction import get_nonce_at_block, load_transaction
45
+ from eth_portfolio._utils import PandableList, _AiterMixin, _YieldEvery, get_buffered_chain_height
46
+ from eth_portfolio._ydb.token_transfers import TokenTransfers
47
+ from eth_portfolio.structs import InternalTransfer, TokenTransfer, Transaction
48
+
49
+ if TYPE_CHECKING:
50
+ from eth_portfolio.address import PortfolioAddress
51
+
52
+ logger = getLogger(__name__)
53
+
54
+
55
+ T = TypeVar("T")
56
+
57
+ _LedgerEntryList = TypeVar(
58
+ "_LedgerEntryList", "TransactionsList", "InternalTransfersList", "TokenTransfersList"
59
+ )
60
+ PandableLedgerEntryList = Union["TransactionsList", "InternalTransfersList", "TokenTransfersList"]
61
+
62
+
63
+ class AddressLedgerBase(
64
+ a_sync.ASyncGenericBase, _AiterMixin[T], Generic[_LedgerEntryList, T], metaclass=ABCMeta
65
+ ):
66
+ """
67
+ Abstract base class for address ledgers in the eth-portfolio system.
68
+ """
69
+
70
+ __slots__ = (
71
+ "address",
72
+ "asynchronous",
73
+ "cached_from",
74
+ "cached_thru",
75
+ "load_prices",
76
+ "objects",
77
+ "portfolio_address",
78
+ "_lock",
79
+ )
80
+
81
+ def __init__(self, portfolio_address: "PortfolioAddress") -> None:
82
+ """
83
+ Initializes the AddressLedgerBase instance.
84
+
85
+ Args:
86
+ portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
87
+ """
88
+
89
+ # TODO replace the following line with an abc implementation.
90
+ # assert isinstance(portfolio_address, PortfolioAddress), f"address must be a PortfolioAddress. try passing in PortfolioAddress({portfolio_address}) instead."
91
+
92
+ super().__init__()
93
+
94
+ self.portfolio_address = portfolio_address
95
+ """
96
+ The portfolio address this ledger belongs to.
97
+ """
98
+
99
+ self.address: Final = self.portfolio_address.address
100
+ """
101
+ The Ethereum address being managed.
102
+ """
103
+
104
+ self.asynchronous: Final = self.portfolio_address.asynchronous
105
+ """
106
+ Flag indicating if the operations are asynchronous.
107
+ """
108
+
109
+ self.load_prices: Final = self.portfolio_address.load_prices
110
+ """
111
+ Indicates if price loading is enabled.
112
+ """
113
+
114
+ self.objects: Final[_LedgerEntryList] = self._list_type()
115
+ """
116
+ _LedgerEntryList: List of ledger entries.
117
+ """
118
+
119
+ # NOTE: The following two properties will both be ints once the cache has contents
120
+ self.cached_from: int = None
121
+ """
122
+ The block from which all entries for this ledger have been loaded into memory.
123
+ """
124
+
125
+ self.cached_thru: int = None
126
+ """
127
+ The block through which all entries for this ledger have been loaded into memory.
128
+ """
129
+
130
+ self._lock: Final = Lock()
131
+ """
132
+ Lock: Lock for synchronizing access to ledger entries.
133
+ """
134
+
135
+ def __hash__(self) -> int:
136
+ """
137
+ Returns the hash of the address.
138
+
139
+ Returns:
140
+ The hash value.
141
+ """
142
+ return hash(self.address)
143
+
144
+ def __repr__(self) -> str:
145
+ return f"<{type(self).__name__} for {self.address} at {hex(id(self))}>"
146
+
147
+ @property
148
+ @abstractmethod
149
+ def _list_type(self) -> type[_LedgerEntryList]:
150
+ """
151
+ Type of list used to store ledger entries.
152
+ """
153
+ ...
154
+
155
+ @property
156
+ def _start_block(self) -> int:
157
+ """
158
+ Returns the starting block for the portfolio address.
159
+
160
+ Returns:
161
+ The starting block number.
162
+ """
163
+ return self.portfolio_address._start_block
164
+
165
+ async def _get_and_yield(
166
+ self, start_block: Block, end_block: Block, mem_cache: bool
167
+ ) -> AsyncGenerator[T, None]:
168
+ """
169
+ Yields ledger entries between the specified blocks.
170
+
171
+ Args:
172
+ start_block: The starting block number.
173
+ end_block: The ending block number.
174
+
175
+ Yields:
176
+ AsyncGenerator[T, None]: An async generator of ledger entries.
177
+ """
178
+ yielder = _YieldEvery(500)
179
+
180
+ async def unblock_loop() -> None:
181
+ """
182
+ Let the event loop run at least once for every 500
183
+ objects yielded so it doesn't get too congested.
184
+ """
185
+ await yielder.tick()
186
+
187
+ if not mem_cache:
188
+ async for ledger_entry in self._get_new_objects(start_block, end_block, False):
189
+ yield ledger_entry
190
+ await unblock_loop()
191
+ return
192
+
193
+ if self.objects and end_block and self.objects[-1].block_number > end_block:
194
+ for ledger_entry in self.objects:
195
+ block = ledger_entry.block_number
196
+ if block < start_block:
197
+ continue
198
+ elif block > end_block:
199
+ return
200
+ yield ledger_entry
201
+ await unblock_loop()
202
+
203
+ yielded = set()
204
+ for ledger_entry in self.objects:
205
+ block = ledger_entry.block_number
206
+ if block < start_block:
207
+ continue
208
+ elif end_block and block > end_block:
209
+ break
210
+ yield ledger_entry
211
+ yielded.add(ledger_entry)
212
+ await unblock_loop()
213
+ async for ledger_entry in self._get_new_objects(start_block, end_block, True): # type: ignore [assignment, misc]
214
+ if ledger_entry not in yielded:
215
+ yield ledger_entry
216
+ yielded.add(ledger_entry)
217
+ await unblock_loop()
218
+ for ledger_entry in self.objects:
219
+ block = ledger_entry.block_number
220
+ if block < start_block:
221
+ continue
222
+ elif end_block and block > end_block:
223
+ break
224
+ if ledger_entry not in yielded:
225
+ yield ledger_entry
226
+ yielded.add(ledger_entry)
227
+ await unblock_loop()
228
+
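# How the mem_cache path above behaves, as read from the code: results are merged
# in three passes:
#   1. yield whatever is already held in self.objects for [start_block, end_block],
#   2. stream anything new from _get_new_objects, skipping entries already seen,
#   3. re-scan self.objects, which appears intended to pick up entries that a
#      concurrent caller loaded into the cache while pass 2 was streaming.
# The `yielded` set prevents duplicates across the passes, and the early block at
# the top short-circuits when the cached objects already extend past end_block.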
229
+ @set_end_block_if_none
230
+ @stuck_coro_debugger
231
+ async def get(self, start_block: Block, end_block: Block) -> _LedgerEntryList:
232
+ """
233
+ Retrieves ledger entries between the specified blocks.
234
+
235
+ Args:
236
+ start_block: The starting block number.
237
+ end_block: The ending block number.
238
+
239
+ Returns:
240
+ _LedgerEntryList: The list of ledger entries.
241
+
242
+ Examples:
243
+ >>> entries = await ledger.get(12000000, 12345678)
244
+ """
245
+ return self._list_type([ledger_entry async for ledger_entry in self[start_block:end_block]])
246
+
247
+ @stuck_coro_debugger
248
+ async def new(self) -> _LedgerEntryList:
249
+ """
250
+ Retrieves new ledger entries since the last cached block.
251
+
252
+ Returns:
253
+ _LedgerEntryList: The list of new ledger entries.
254
+
255
+ Examples:
256
+ >>> new_entries = await ledger.new()
257
+ """
258
+ start_block = 0 if self.cached_thru is None else self.cached_thru + 1
259
+ end_block = await get_buffered_chain_height()
260
+ return self[start_block, end_block] # type: ignore [index, return-value]
261
+
262
+ async def sent(
263
+ self, start_block: Block | None = None, end_block: Block | None = None
264
+ ) -> AsyncIterator[T]:
265
+ address = self.portfolio_address.address
266
+ async for obj in self[start_block:end_block]:
267
+ if obj.from_address == address:
268
+ yield obj
269
+
270
+ async def received(
271
+ self, start_block: Block | None = None, end_block: Block | None = None
272
+ ) -> AsyncIterator[T]:
273
+ address = self.portfolio_address.address
274
+ async for obj in self[start_block:end_block]:
275
+ if obj.from_address != address:
276
+ yield obj
277
+
278
+ @stuck_coro_debugger
279
+ @set_end_block_if_none
280
+ async def _get_new_objects(
281
+ self, start_block: Block, end_block: Block, mem_cache: bool
282
+ ) -> AsyncIterator[T]:
283
+ """
284
+ Retrieves new ledger entries between the specified blocks.
285
+
286
+ Args:
287
+ start_block: The starting block number.
288
+ end_block: The ending block number.
289
+
290
+ Yields:
291
+ AsyncIterator[T]: An async iterator of new ledger entries.
292
+ """
293
+ async with self._lock:
294
+ async for ledger_entry in self._load_new_objects(start_block, end_block, mem_cache):
295
+ yield ledger_entry
296
+
297
+ @abstractmethod
298
+ async def _load_new_objects(
299
+ self, start_block: Block, end_block: Block, mem_cache: bool
300
+ ) -> AsyncIterator[T]:
301
+ """
302
+ Abstract method to load new ledger entries between the specified blocks.
303
+
304
+ Args:
305
+ start_block: The starting block number.
306
+ end_block: The ending block number.
307
+
308
+ Yields:
309
+ AsyncIterator[T]: An async iterator of new ledger entries.
310
+ """
311
+ yield # type: ignore [misc]
312
+
313
+ def _check_blocks_against_cache(
314
+ self, start_block: Block, end_block: Block
315
+ ) -> tuple[Block, Block]:
316
+ """
317
+ Checks the specified block range against the cached block range.
318
+
319
+ Args:
320
+ start_block: The starting block number.
321
+ end_block: The ending block number.
322
+
323
+ Returns:
324
+ The adjusted block range.
325
+
326
+ Raises:
327
+ ValueError: If the start block is after the end block.
328
+ _exceptions.BlockRangeIsCached: If the block range is already cached.
329
+ _exceptions.BlockRangeOutOfBounds: If the block range is out of bounds.
330
+ """
331
+ if start_block > end_block:
332
+ raise ValueError(f"Start block {start_block} is after end block {end_block}")
333
+
334
+ # There is no cache
335
+ elif self.cached_from is None or self.cached_thru is None:
336
+ return start_block, end_block
337
+
338
+ # Range is cached
339
+ elif start_block >= self.cached_from and end_block <= self.cached_thru:
340
+ raise _exceptions.BlockRangeIsCached()
341
+
342
+ # Beginning of range is cached
343
+ elif (
344
+ start_block >= self.cached_from
345
+ and start_block < self.cached_thru
346
+ and end_block > self.cached_thru
347
+ ):
348
+ return self.cached_thru + 1, end_block
349
+
350
+ # End of range is cached
351
+ elif (
352
+ start_block < self.cached_from
353
+ and end_block >= self.cached_from
354
+ and end_block < self.cached_thru
355
+ ):
356
+ return start_block, self.cached_from - 1
357
+
358
+ # Beginning and end both outside bounds of cache to high side
359
+ elif start_block > self.cached_thru:
360
+ return self.cached_thru + 1, end_block
361
+
362
+ # Beginning and end both outside bounds of cache to low side
363
+ elif end_block < self.cached_from:
364
+ return start_block, self.cached_from - 1
365
+
366
+ # Beginning and end both outside bounds of cache, split
367
+ elif start_block < self.cached_from and end_block > self.cached_thru:
368
+ raise _exceptions.BlockRangeOutOfBounds(start_block, end_block, self)
369
+
370
+ raise NotImplementedError(
371
+ f"This is a work in progress and we still need code for this specific case. Feel free to create an issue on our github if you need this.\n\nstart_block: {start_block} end_block: {end_block} cached_from: {self.cached_from} cached_thru: {self.cached_thru}"
372
+ )
373
+
374
+
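# A worked illustration of the trimming rules in _check_blocks_against_cache above,
# using made-up numbers for a ledger whose in-memory cache covers blocks
# 1_000..2_000 (cached_from=1_000, cached_thru=2_000):
#
#   (1_200, 1_800) -> raises BlockRangeIsCached        # fully inside the cache
#   (1_500, 2_500) -> (2_001, 2_500)                   # only the tail is missing
#   (  500, 1_500) -> (500, 999)                       # only the head is missing
#   (2_500, 3_000) -> (2_001, 3_000)                   # entirely above the cache
#   (  100,   500) -> (100, 999)                       # entirely below the cache
#   (  500, 2_500) -> raises BlockRangeOutOfBounds     # straddles the cache on both sides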
375
+ class TransactionsList(PandableList[Transaction]):
376
+ """
377
+ A list subclass for transactions that can convert to a :class:`DataFrame`.
378
+ """
379
+
380
+ def _df(self) -> DataFrame:
381
+ """
382
+ Converts the list of transactions to a DataFrame.
383
+
384
+ Returns:
385
+ DataFrame: The transactions as a DataFrame.
386
+ """
387
+ df = DataFrame(self)
388
+ if len(df) > 0:
389
+ df.chainId = df.chainId.apply(int)
390
+ df.blockNumber = df.blockNumber.apply(int)
391
+ df.transactionIndex = df.transactionIndex.apply(int)
392
+ df.nonce = df.nonce.apply(int)
393
+ df.gas = df.gas.apply(int)
394
+ df.gasPrice = df.gasPrice.apply(int)
395
+ return df
396
+
397
+
398
+ Nonce = int
399
+
400
+
401
+ class AddressTransactionsLedger(AddressLedgerBase[TransactionsList, Transaction]):
402
+ """
403
+ A ledger for managing transaction entries.
404
+ """
405
+
406
+ _list_type = TransactionsList
407
+ __slots__ = ("cached_thru_nonce", "_queue", "_ready", "_num_workers", "_workers")
408
+
409
+ def __init__(self, portfolio_address: "PortfolioAddress", num_workers: int = 1000):
410
+ """
411
+ Initializes the AddressTransactionsLedger instance.
412
+
413
+ Args:
414
+ portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
415
+ """
416
+ super().__init__(portfolio_address)
417
+ self.cached_thru_nonce = -1
418
+ """
419
+ The nonce through which all transactions have been loaded into memory.
420
+ """
421
+ self._queue = Queue()
422
+ self._ready = Queue()
423
+ self._num_workers = num_workers
424
+ self._workers = []
425
+
426
+ def __del__(self) -> None:
427
+ self.__stop_workers()
428
+
429
+ @stuck_coro_debugger
430
+ @set_end_block_if_none
431
+ async def _load_new_objects(self, _: Block, end_block: Block, mem_cache: bool) -> AsyncIterator[Transaction]: # type: ignore [override]
432
+ """
433
+ Loads new transaction entries between the specified blocks.
434
+
435
+ Args:
436
+ _: The starting block number (unused).
437
+ end_block: The ending block number.
438
+
439
+ Yields:
440
+ AsyncIterator[Transaction]: An async iterator of transaction entries.
441
+ """
442
+ # NOTE: start_block is intentionally ignored for now. Filtering by block here risks
443
+ # corrupting the nonce-based cache (cached_thru_nonce) unless we refactor cache semantics.
444
+ if self.cached_thru and end_block < self.cached_thru:
445
+ return
446
+ if not mem_cache:
447
+ logger.warning(
448
+ f"{type(self).__name__}._load_new_objects mem_cache arg is not yet implemented"
449
+ )
450
+ address = self.address
451
+ end_block_nonce: int = await get_nonce_at_block(address, end_block)
452
+ if nonces := tuple(range(self.cached_thru_nonce + 1, end_block_nonce + 1)):
453
+ yielder = _YieldEvery(1000)
454
+ for nonce in nonces:
455
+ self._queue.put_nowait(nonce)
456
+
457
+ # Keep the event loop relatively unblocked
458
+ # and let the rpc start doing work asap
459
+ await yielder.tick()
460
+
461
+ len_nonces = len(nonces)
462
+ del nonces
463
+
464
+ self._ensure_workers(min(len_nonces, self._num_workers))
465
+
466
+ transactions = []
467
+ transaction: Transaction | None
468
+ for _ in tqdm(range(len_nonces), desc=f"Transactions {address}"):
469
+ nonce, transaction = await self._ready.get()
470
+ if transaction:
471
+ if isinstance(transaction, Exception):
472
+ raise transaction
473
+ transactions.append(transaction)
474
+ yield transaction
475
+ elif nonce == 0 and self.cached_thru_nonce == -1:
476
+ # Gnosis safes
477
+ self.cached_thru_nonce = 0
478
+ else:
479
+ # NOTE Are we sure this is the correct way to handle this scenario? Are we sure it will ever even occur with the new gnosis handling?
480
+ logger.warning("No transaction with nonce %s for %s", nonce, address)
481
+
482
+ self.__stop_workers()
483
+
484
+ if transactions:
485
+ self.objects.extend(transactions)
486
+ if self.objects:
487
+ self.objects.sort(key=lambda t: t.nonce)
488
+ self.cached_thru_nonce = self.objects[-1].nonce
489
+
490
+ if self.cached_from is None:
491
+ self.cached_from = 0
492
+ if self.cached_thru is None or end_block > self.cached_thru:
493
+ self.cached_thru = end_block
494
+
495
+ def _ensure_workers(self, num_workers: int) -> None:
496
+ len_workers = len(self._workers)
497
+ if len_workers < num_workers:
498
+ worker_fn = self.__worker_fn
499
+ address = self.address
500
+ load_prices = self.load_prices
501
+ queue_get = stuck_coro_debugger(self._queue.get)
502
+ put_ready = self._ready.put_nowait
503
+
504
+ self._workers.extend(
505
+ create_task(
506
+ coro=worker_fn(address, load_prices, queue_get, put_ready),
507
+ name=f"AddressTransactionsLedger worker {i} for {address}",
508
+ )
509
+ for i in range(num_workers - len_workers)
510
+ )
511
+
512
+ async def __worker_fn(
513
+ self,
514
+ address: ChecksumAddress,
515
+ load_prices: bool,
516
+ queue_get: Callable[[], Nonce],
517
+ put_ready: Callable[[Nonce, Transaction | None], None],
518
+ ) -> NoReturn:
519
+ try:
520
+ while True:
521
+ nonce = await queue_get()
522
+ try:
523
+ put_ready(await load_transaction(address, nonce, load_prices))
524
+ except Exception as e:
525
+ put_ready((nonce, e))
526
+ except Exception as e:
527
+ logger.error("%s in %s __worker_fn", type(e), self)
528
+ logger.exception(e)
529
+ raise
530
+
531
+ def __stop_workers(self) -> None:
532
+ logger.debug("stopping workers for %s", self)
533
+ workers = self._workers
534
+ pop_next = workers.pop
535
+ for _ in range(len(workers)):
536
+ pop_next().cancel()
537
+
538
+
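# A simplified, standalone sketch of the queue/worker pattern AddressTransactionsLedger
# uses above: nonces go into an input queue, a bounded number of workers resolve them
# concurrently, and (nonce, result) pairs come back through a second queue in completion
# order. `resolve` is a placeholder standing in for load_transaction; none of the names
# below belong to the package itself.
import asyncio
from collections.abc import Awaitable, Callable


async def run_worker_pool(
    nonces: list[int],
    resolve: Callable[[int], Awaitable[object]],
    num_workers: int = 10,
) -> list[tuple[int, object]]:
    todo: asyncio.Queue[int] = asyncio.Queue()
    done: asyncio.Queue[tuple[int, object]] = asyncio.Queue()

    for nonce in nonces:
        todo.put_nowait(nonce)

    async def worker() -> None:
        while True:
            nonce = await todo.get()
            try:
                done.put_nowait((nonce, await resolve(nonce)))
            except Exception as exc:  # surface failures to the consumer, like __worker_fn does
                done.put_nowait((nonce, exc))

    workers = [asyncio.create_task(worker()) for _ in range(min(num_workers, len(nonces)))]
    try:
        # Consume exactly one result per queued nonce, then tear the workers down.
        return [await done.get() for _ in nonces]
    finally:
        for task in workers:
            task.cancel()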
539
+ class InternalTransfersList(PandableList[InternalTransfer]):
540
+ """
541
+ A list subclass for internal transfer entries that can convert to a :class:`DataFrame`.
542
+ """
543
+
544
+
545
+ @a_sync.Semaphore(128, __name__ + ".trace_filter")
546
+ @stuck_coro_debugger
547
+ @eth_retry.auto_retry
548
+ async def trace_filter(
549
+ from_block: BlockNumber,
550
+ to_block: BlockNumber,
551
+ params: TraceFilterParams,
552
+ ) -> list[FilterTrace]:
553
+ return await __trace_filter(from_block, to_block, params)
554
+
555
+
556
+ async def __trace_filter(
557
+ from_block: BlockNumber,
558
+ to_block: BlockNumber,
559
+ params: TraceFilterParams,
560
+ ) -> list[FilterTrace]:
561
+ try:
562
+ return await dank_mids.eth.trace_filter(
563
+ {"fromBlock": from_block, "toBlock": to_block, **params}
564
+ )
565
+ except ClientResponseError as e:
566
+ if e.status != HTTPStatus.SERVICE_UNAVAILABLE or to_block == from_block:
567
+ raise
568
+ except TypeError as e:
569
+ # This is some intermittent error I need to debug in dank_mids, I think it occurs when we get rate limited
570
+ if str(e) != "a bytes-like object is required, not 'NoneType'":
571
+ raise
572
+ await sleep(0.5)
573
+ # remove this logger when I know there are no looping issues
574
+ logger.info("call failed, trying again")
575
+
576
+ range_size = to_block - from_block + 1
577
+ chunk_size = range_size // 2
578
+ halfway = from_block + chunk_size
579
+
580
+ results = await gather(
581
+ __trace_filter(from_block, BlockNumber(halfway), params),
582
+ __trace_filter(BlockNumber(halfway + 1), to_block, params),
583
+ )
584
+ return results[0] + results[1]
585
+
586
+
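# A generic sketch of the same "halve the range and retry" pattern used by
# __trace_filter above, decoupled from dank_mids so the shape is easier to see.
# `fetch` and `is_overload_error` are placeholders supplied by the caller, not
# part of eth-portfolio.
import asyncio
from collections.abc import Awaitable, Callable
from typing import TypeVar

R = TypeVar("R")


async def fetch_with_bisection(
    start: int,
    end: int,
    fetch: Callable[[int, int], Awaitable[list[R]]],
    is_overload_error: Callable[[Exception], bool],
) -> list[R]:
    try:
        return await fetch(start, end)
    except Exception as exc:
        # A single-block request that still fails is a real error, not an overload.
        if start == end or not is_overload_error(exc):
            raise
    mid = start + (end - start) // 2
    left, right = await asyncio.gather(
        fetch_with_bisection(start, mid, fetch, is_overload_error),
        fetch_with_bisection(mid + 1, end, fetch, is_overload_error),
    )
    return left + right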
587
+ @alru_cache(maxsize=None)
588
+ @eth_retry.auto_retry(min_sleep_time=1, max_sleep_time=3, max_retries=20, suppress_logs=1)
589
+ async def get_transaction_status(txhash: str) -> Status:
590
+ """
591
+ Retrieves the status for a transaction.
592
+
593
+ This function is cached in memory (via alru_cache) to reduce resource usage.
594
+
595
+ Args:
596
+ txhash: The hash of the transaction.
597
+
598
+ Returns:
599
+ The status of the transaction.
600
+ """
601
+ return await dank_mids.eth.get_transaction_status(txhash)
602
+
603
+
604
+ _trace_semaphores = defaultdict(lambda: a_sync.Semaphore(4, __name__ + ".trace_semaphore"))
605
+
606
+
607
+ @cache_to_disk
608
+ @eth_retry.auto_retry
609
+ async def get_traces(
610
+ from_block: BlockNumber,
611
+ to_block: BlockNumber,
612
+ filter_params: TraceFilterParams,
613
+ ) -> list[FilterTrace]:
614
+ """
615
+ Retrieves traces from the web3 provider using the given parameters.
616
+
617
+ This function is cached to disk to reduce resource usage.
618
+
619
+ Args:
620
+ filter_params: The parameters for the trace filter.
621
+
622
+ Returns:
623
+ The list of traces.
624
+ """
625
+ if chain.id == Network.Polygon:
626
+ logger.warning(
627
+ "polygon doesnt support trace_filter method, must develop alternate solution"
628
+ )
629
+ return []
630
+ semaphore_key = (
631
+ tuple(filter_params.get("toAddress", ("",))),
632
+ tuple(filter_params.get("fromAddress", ("",))),
633
+ )
634
+ async with _trace_semaphores[semaphore_key]:
635
+ traces = await trace_filter(from_block, to_block, filter_params)
636
+ return await _check_traces(traces) if traces else []
637
+
638
+
639
+ @stuck_coro_debugger
640
+ @eth_retry.auto_retry
641
+ async def _check_traces(traces: list[FilterTrace]) -> list[FilterTrace]:
642
+ good_traces = []
643
+ append = good_traces.append
644
+
645
+ check_status_tasks = a_sync.TaskMapping(get_transaction_status)
646
+
647
+ yielder = _YieldEvery(500)
648
+ for trace in traces:
649
+ # Make sure we don't block up the event loop
650
+ await yielder.tick()
651
+
652
+ if "error" in trace:
653
+ continue
654
+
655
+ # NOTE: Not sure why these appear, but I've yet to come across an internal transfer
656
+ # that actually transmitted value to the singleton even though they appear to.
657
+ if (
658
+ isinstance(trace, call.Trace)
659
+ and trace.action.to == "0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552"
660
+ ): # Gnosis Safe Singleton 1.3.0
661
+ continue
662
+
663
+ if not isinstance(trace, reward.Trace):
664
+ # NOTE: We don't need to confirm block rewards came from a successful transaction, because they don't come from a transaction
665
+ check_status_tasks[trace.transactionHash]
666
+
667
+ append(trace)
668
+
669
+ # NOTE: We don't need to confirm block rewards came from a successful transaction, because they don't come from a transaction
670
+ return [
671
+ trace
672
+ for trace in good_traces
673
+ if isinstance(trace, reward.Trace)
674
+ or await check_status_tasks[trace.transactionHash] == Status.success
675
+ ]
676
+
677
+
678
+ BlockRange = tuple[Block, Block]
679
+
680
+
681
+ def _get_block_ranges(start_block: Block, end_block: Block) -> list[BlockRange]:
682
+ return [(i, i + BATCH_SIZE - 1) for i in range(start_block, end_block, BATCH_SIZE)]
683
+
684
+
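# Worked example of the chunking above, assuming BATCH_SIZE == 10_000 (the real value
# comes from y.utils.events and may differ):
#
#   _get_block_ranges(0, 25_000)
#   -> [(0, 9_999), (10_000, 19_999), (20_000, 29_999)]
#
# Note that the last tuple can extend past end_block and that range() excludes
# end_block itself, which is why the start_block == end_block case is handled
# separately in AddressInternalTransfersLedger._load_new_objects below.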
685
+ class AddressInternalTransfersLedger(AddressLedgerBase[InternalTransfersList, InternalTransfer]):
686
+ """
687
+ A ledger for managing internal transfer entries.
688
+ """
689
+
690
+ _list_type = InternalTransfersList
691
+
692
+ @stuck_coro_debugger
693
+ @set_end_block_if_none
694
+ async def _load_new_objects(
695
+ self, start_block: Block, end_block: Block, mem_cache: bool
696
+ ) -> AsyncIterator[InternalTransfer]:
697
+ """
698
+ Loads new internal transfer entries between the specified blocks.
699
+
700
+ Args:
701
+ start_block: The starting block number.
702
+ end_block: The ending block number.
703
+
704
+ Yields:
705
+ AsyncIterator[InternalTransfer]: An async iterator of internal transfer entries.
706
+ """
707
+ if start_block == 0:
708
+ start_block = 1
709
+
710
+ if mem_cache:
711
+ try:
712
+ start_block, end_block = self._check_blocks_against_cache(start_block, end_block)
713
+ except _exceptions.BlockRangeIsCached:
714
+ return
715
+ except _exceptions.BlockRangeOutOfBounds as e:
716
+ await e.load_remaining()
717
+ return
718
+
719
+ # TODO: figure out where this float comes from and raise a TypeError there
720
+ if isinstance(start_block, float) and int(start_block) == start_block:
721
+ start_block = int(start_block)
722
+ if isinstance(end_block, float) and int(end_block) == end_block:
723
+ end_block = int(end_block)
724
+
725
+ address = self.address
726
+ if start_block == end_block:
727
+ trace_filter_coros = [
728
+ get_traces(start_block, end_block, {"toAddress": [address]}),
729
+ get_traces(start_block, end_block, {"fromAddress": [address]}),
730
+ ]
731
+ else:
732
+ block_ranges = _get_block_ranges(start_block, end_block)
733
+ addr_filters = {"toAddress": [address]}, {"fromAddress": [address]}
734
+ trace_filter_coros = [
735
+ get_traces(start, end, addr_filter)
736
+ for (start, end), addr_filter in product(block_ranges, addr_filters)
737
+ ]
738
+
739
+ # NOTE: We only want tqdm progress bar when there is work to do
740
+ if len(trace_filter_coros) < 10:
741
+ generator_function = a_sync.as_completed
742
+ else:
743
+ generator_function = partial( # type: ignore [assignment]
744
+ a_sync.as_completed, tqdm=True, desc=f"Trace Filters {address}"
745
+ )
746
+
747
+ load = InternalTransfer.from_trace
748
+
749
+ if mem_cache:
750
+ internal_transfers = []
751
+ append_transfer = internal_transfers.append
752
+
753
+ yielder = _YieldEvery(1000)
754
+ if self.load_prices:
755
+ traces = []
756
+ async for chunk in generator_function(trace_filter_coros, aiter=True):
757
+ traces.extend(chunk)
758
+
759
+ if traces:
760
+ tasks = []
761
+ while traces:
762
+ tasks.extend(
763
+ create_task(load(trace, load_prices=True)) for trace in traces[:5000]
764
+ )
765
+ traces = traces[5000:]
766
+ # let the tasks start sending calls to your node now
767
+ # without waiting for all tasks to be created
768
+ await yield_to_loop()
769
+
770
+ async for internal_transfer in a_sync.as_completed(
771
+ tasks, aiter=True, tqdm=True, desc=f"Internal Transfers {address}"
772
+ ):
773
+ if internal_transfer is not None:
774
+ if mem_cache:
775
+ append_transfer(internal_transfer)
776
+ yield internal_transfer
777
+
778
+ await yielder.tick()
779
+
780
+ else:
781
+ async for chunk in generator_function(trace_filter_coros, aiter=True):
782
+ for trace in chunk:
783
+ internal_transfer = await load(trace, load_prices=False)
784
+ if internal_transfer is not None:
785
+ if mem_cache:
786
+ append_transfer(internal_transfer)
787
+ yield internal_transfer
788
+
789
+ await yielder.tick()
790
+
791
+ if mem_cache and internal_transfers:
792
+ self.objects.extend(internal_transfers)
793
+ self.objects.sort(key=lambda t: (t.block_number, t.transaction_index))
794
+
795
+ if self.cached_from is None or start_block < self.cached_from:
796
+ self.cached_from = start_block
797
+ if self.cached_thru is None or end_block > self.cached_thru:
798
+ self.cached_thru = end_block
799
+
800
+
801
+ _yield_tokens_semaphore = a_sync.Semaphore(
802
+ 10, name="eth_portfolio._ledgers.address._yield_tokens_semaphore"
803
+ )
804
+
805
+
806
+ class TokenTransfersList(PandableList[TokenTransfer]):
807
+ """
808
+ A list subclass for token transfer entries that can convert to a :class:`DataFrame`.
809
+ """
810
+
811
+
812
+ class AddressTokenTransfersLedger(AddressLedgerBase[TokenTransfersList, TokenTransfer]):
813
+ """
814
+ A ledger for managing token transfer entries.
815
+ """
816
+
817
+ _list_type = TokenTransfersList
818
+ __slots__ = ("_transfers",)
819
+
820
+ def __init__(self, portfolio_address: "PortfolioAddress"):
821
+ """
822
+ Initializes the AddressTokenTransfersLedger instance.
823
+
824
+ Args:
825
+ portfolio_address: The :class:`~eth_portfolio.address.PortfolioAddress` this ledger belongs to.
826
+ """
827
+ super().__init__(portfolio_address)
828
+ self._transfers = TokenTransfers(
829
+ self.address, self.portfolio_address._start_block, load_prices=self.load_prices
830
+ )
831
+ """
832
+ TokenTransfers: Instance for handling token transfer operations.
833
+ """
834
+
835
+ @stuck_coro_debugger
836
+ async def list_tokens_at_block(self, block: int | None = None) -> list[ERC20]:
837
+ """
838
+ Lists the tokens held at a specific block.
839
+
840
+ Args:
841
+ block (int | None): The block number. Defaults to None.
842
+
843
+ Returns:
844
+ List[ERC20]: The list of ERC20 tokens.
845
+
846
+ Examples:
847
+ >>> tokens = await ledger.list_tokens_at_block(12345678)
848
+ """
849
+ return [token async for token in self._yield_tokens_at_block(block)]
850
+
851
+ async def _yield_tokens_at_block(self, block: int | None = None) -> AsyncIterator[ERC20]:
852
+ """
853
+ Yields the tokens held at a specific block.
854
+
855
+ Args:
856
+ block (int | None): The block number. Defaults to None.
857
+
858
+ Yields:
859
+ AsyncIterator[ERC20]: An async iterator of ERC20 tokens.
860
+ """
861
+ async with _yield_tokens_semaphore:
862
+ yielded = set()
863
+ async for transfer in self[:block]:
864
+ address = transfer.token_address
865
+ if address not in yielded:
866
+ yielded.add(address)
867
+ yield ERC20(address, asynchronous=self.asynchronous)
868
+
869
+ @stuck_coro_debugger
870
+ @set_end_block_if_none
871
+ async def _load_new_objects(self, start_block: Block, end_block: Block, mem_cache: bool) -> AsyncIterator[TokenTransfer]: # type: ignore [override]
872
+ """
873
+ Loads new token transfer entries between the specified blocks.
874
+
875
+ Args:
876
+ start_block: The starting block number.
877
+ end_block: The ending block number.
878
+
879
+ Yields:
880
+ AsyncIterator[TokenTransfer]: An async iterator of token transfer entries.
881
+ """
882
+ if mem_cache:
883
+ try:
884
+ start_block, end_block = self._check_blocks_against_cache(start_block, end_block)
885
+ except _exceptions.BlockRangeIsCached:
886
+ return
887
+ except _exceptions.BlockRangeOutOfBounds as e:
888
+ await e.load_remaining()
889
+ return
890
+
891
+ if tasks := [
892
+ task
893
+ async for task in self._transfers.yield_thru_block(end_block)
894
+ if start_block <= task.block # type: ignore [attr-defined]
895
+ ]:
896
+ token_transfers = []
897
+ append_token_transfer = token_transfers.append
898
+ yielder = _YieldEvery(100)
899
+ async for token_transfer in a_sync.as_completed(
900
+ tasks, aiter=True, tqdm=True, desc=f"Token Transfers {self.address}"
901
+ ):
902
+ if token_transfer:
903
+ if mem_cache:
904
+ append_token_transfer(token_transfer)
905
+ yield token_transfer
906
+
907
+ # Don't let the event loop get congested
908
+ await yielder.tick()
909
+
910
+ if mem_cache and token_transfers:
911
+ self.objects.extend(token_transfers)
912
+ self.objects.sort(key=lambda t: (t.block_number, t.transaction_index, t.log_index))
913
+
914
+ if self.cached_from is None or start_block < self.cached_from:
915
+ self.cached_from = start_block
916
+ if self.cached_thru is None or end_block > self.cached_thru:
917
+ self.cached_thru = end_block
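# A minimal usage sketch for the ledger classes defined in this file. It assumes a
# PortfolioAddress has been constructed elsewhere and that it exposes a `transactions`
# ledger attribute; the exact constructor arguments and attribute names are not shown
# in this diff, so treat them as assumptions.
import asyncio

from eth_portfolio.address import PortfolioAddress  # assumed import path, matching the TYPE_CHECKING import above


async def dump_recent_transactions(address: PortfolioAddress) -> None:
    # Ledgers support async iteration over a block range via slicing,
    # mirroring the `self[start_block:end_block]` calls used throughout this file.
    async for tx in address.transactions[18_000_000:18_100_000]:
        print(tx.block_number, tx.nonce)

    # `get` materializes the same range into a TransactionsList.
    txs = await address.transactions.get(18_000_000, 18_100_000)
    print(f"{len(txs)} transactions loaded")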