faster-eth-utils 5.3.2__cp312-cp312-musllinux_1_2_i686.whl → 5.3.22__cp312-cp312-musllinux_1_2_i686.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of faster-eth-utils was flagged for review.
Files changed (46)
  1. faster_eth_utils/abi.cpython-312-i386-linux-musl.so +0 -0
  2. faster_eth_utils/abi.py +99 -74
  3. faster_eth_utils/address.cpython-312-i386-linux-musl.so +0 -0
  4. faster_eth_utils/address.py +15 -36
  5. faster_eth_utils/applicators.cpython-312-i386-linux-musl.so +0 -0
  6. faster_eth_utils/applicators.py +79 -59
  7. faster_eth_utils/conversions.cpython-312-i386-linux-musl.so +0 -0
  8. faster_eth_utils/conversions.py +18 -20
  9. faster_eth_utils/crypto.cpython-312-i386-linux-musl.so +0 -0
  10. faster_eth_utils/crypto.py +3 -8
  11. faster_eth_utils/currency.cpython-312-i386-linux-musl.so +0 -0
  12. faster_eth_utils/currency.py +11 -8
  13. faster_eth_utils/curried/__init__.py +65 -65
  14. faster_eth_utils/debug.cpython-312-i386-linux-musl.so +0 -0
  15. faster_eth_utils/decorators.cpython-312-i386-linux-musl.so +0 -0
  16. faster_eth_utils/decorators.py +65 -29
  17. faster_eth_utils/encoding.cpython-312-i386-linux-musl.so +0 -0
  18. faster_eth_utils/encoding.py +1 -1
  19. faster_eth_utils/exceptions.cpython-312-i386-linux-musl.so +0 -0
  20. faster_eth_utils/exceptions.py +8 -1
  21. faster_eth_utils/functional.cpython-312-i386-linux-musl.so +0 -0
  22. faster_eth_utils/functional.py +39 -27
  23. faster_eth_utils/hexadecimal.cpython-312-i386-linux-musl.so +0 -0
  24. faster_eth_utils/hexadecimal.py +8 -12
  25. faster_eth_utils/humanize.cpython-312-i386-linux-musl.so +0 -0
  26. faster_eth_utils/humanize.py +18 -22
  27. faster_eth_utils/logging.py +51 -44
  28. faster_eth_utils/module_loading.cpython-312-i386-linux-musl.so +0 -0
  29. faster_eth_utils/network.cpython-312-i386-linux-musl.so +0 -0
  30. faster_eth_utils/network.py +11 -4
  31. faster_eth_utils/numeric.cpython-312-i386-linux-musl.so +0 -0
  32. faster_eth_utils/pydantic.py +15 -13
  33. faster_eth_utils/toolz.cpython-312-i386-linux-musl.so +0 -0
  34. faster_eth_utils/toolz.py +82 -152
  35. faster_eth_utils/types.cpython-312-i386-linux-musl.so +0 -0
  36. faster_eth_utils/types.py +20 -17
  37. faster_eth_utils/units.cpython-312-i386-linux-musl.so +0 -0
  38. {faster_eth_utils-5.3.2.dist-info → faster_eth_utils-5.3.22.dist-info}/METADATA +46 -17
  39. faster_eth_utils-5.3.22.dist-info/RECORD +53 -0
  40. faster_eth_utils-5.3.22.dist-info/top_level.txt +3 -0
  41. faster_eth_utils__mypyc.cpython-312-i386-linux-musl.so +0 -0
  42. bce0bfc64ce5e845ec4a__mypyc.cpython-312-i386-linux-musl.so +0 -0
  43. faster_eth_utils-5.3.2.dist-info/RECORD +0 -47
  44. faster_eth_utils-5.3.2.dist-info/top_level.txt +0 -3
  45. {faster_eth_utils-5.3.2.dist-info → faster_eth_utils-5.3.22.dist-info}/WHEEL +0 -0
  46. {faster_eth_utils-5.3.2.dist-info → faster_eth_utils-5.3.22.dist-info}/licenses/LICENSE +0 -0
@@ -1,69 +1,103 @@
  import functools
- import itertools
- import os
+ from collections.abc import Callable
  from typing import (
      Any,
-     Callable,
-     Dict,
-     Optional,
-     Type,
+     Concatenate,
+     Final,
+     Generic,
      TypeVar,
      final,
  )

- from .types import (
-     is_text,
- )
+ from typing_extensions import ParamSpec
+
+ P = ParamSpec("P")

  T = TypeVar("T")

+ TInstance = TypeVar("TInstance", bound=object)
+ """A TypeVar representing an instance that a method can bind to."""
+

  @final
- class combomethod:
-     def __init__(self, method: Callable[..., Any]) -> None:
-         self.method = method
+ class combomethod(Generic[TInstance, P, T]):
+     def __init__(
+         self, method: Callable[Concatenate[TInstance | type[TInstance], P], T]
+     ) -> None:
+         self.method: Final = method
+
+     def __repr__(self) -> str:
+         return f"combomethod({self.method})"

      def __get__(
-         self, obj: Optional[T] = None, objtype: Optional[Type[T]] = None
-     ) -> Callable[..., Any]:
-         @functools.wraps(self.method)
-         def _wrapper(*args: Any, **kwargs: Any) -> Any:
-             if obj is not None:
-                 return self.method(obj, *args, **kwargs)
-             else:
-                 return self.method(objtype, *args, **kwargs)
+         self,
+         obj: TInstance | None,
+         objtype: type[TInstance],
+     ) -> Callable[P, T]:
+
+         method = self.method
+         bound_arg = objtype if obj is None else obj
+
+         @functools.wraps(method)
+         def _wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+             return method(bound_arg, *args, **kwargs)

          return _wrapper


- def return_arg_type(at_position: int) -> Callable[..., Callable[..., T]]:
+ _return_arg_type_deco_cache: Final[
+     dict[int, Callable[[Callable[P, T]], Callable[P, Any]]]
+ ] = {}
+ # No need to hold so many unique instances in memory
+
+
+ def return_arg_type(at_position: int) -> Callable[[Callable[P, T]], Callable[P, Any]]:
      """
      Wrap the return value with the result of `type(args[at_position])`.
      """
+     if deco := _return_arg_type_deco_cache.get(at_position):
+         return deco

-     def decorator(to_wrap: Callable[..., Any]) -> Callable[..., T]:
+     def decorator(to_wrap: Callable[P, Any]) -> Callable[P, Any]:
          @functools.wraps(to_wrap)
-         def wrapper(*args: Any, **kwargs: Any) -> T:  # type: ignore
+         def wrapper(*args: P.args, **kwargs: P.kwargs) -> Any:
              result = to_wrap(*args, **kwargs)
              ReturnType = type(args[at_position])
-             return ReturnType(result)  # type: ignore
+             return ReturnType(result)  # type: ignore [call-arg]

          return wrapper

+     _return_arg_type_deco_cache[at_position] = decorator
+
      return decorator


+ ExcType = type[BaseException]
+
+ ReplaceExceptionsCache = dict[
+     tuple[tuple[ExcType, ExcType], ...],
+     Callable[[Callable[P, T]], Callable[P, T]],
+ ]
+
+ _replace_exceptions_deco_cache: Final[ReplaceExceptionsCache[..., Any]] = {}
+ # No need to hold so many unique instances in memory
+
+
  def replace_exceptions(
-     old_to_new_exceptions: Dict[Type[BaseException], Type[BaseException]]
- ) -> Callable[[Callable[..., T]], Callable[..., T]]:
+     old_to_new_exceptions: dict[ExcType, ExcType],
+ ) -> Callable[[Callable[P, T]], Callable[P, T]]:
      """
      Replaces old exceptions with new exceptions to be raised in their place.
      """
-     old_exceptions = tuple(old_to_new_exceptions.keys())
+     cache_key = tuple(old_to_new_exceptions.items())
+     if deco := _replace_exceptions_deco_cache.get(cache_key):
+         return deco
+
+     old_exceptions = tuple(old_to_new_exceptions)

-     def decorator(to_wrap: Callable[..., T]) -> Callable[..., T]:
+     def decorator(to_wrap: Callable[P, T]) -> Callable[P, T]:
          @functools.wraps(to_wrap)
-         def wrapped(*args: Any, **kwargs: Any) -> T:
+         def wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
              try:
                  return to_wrap(*args, **kwargs)
              except old_exceptions as err:
@@ -76,4 +110,6 @@ def replace_exceptions(

          return wrapped

+     _replace_exceptions_deco_cache[cache_key] = decorator
+
      return decorator
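
Reviewer note: assuming this hunk is faster_eth_utils/decorators.py (per the changed-files list), the new module-level caches mean that calling replace_exceptions twice with an equal old-to-new mapping should hand back the same decorator object instead of building a fresh closure each time. A minimal sketch of that expected behaviour; RPCError and fetch_block are illustrative names, not package code:

    from faster_eth_utils.decorators import replace_exceptions

    class RPCError(Exception):
        """Hypothetical application-level exception, used only for this example."""

    # Equal mappings produce equal cache keys, so the second call should be
    # served from _replace_exceptions_deco_cache rather than rebuilt.
    deco_a = replace_exceptions({TimeoutError: RPCError})
    deco_b = replace_exceptions({TimeoutError: RPCError})
    assert deco_a is deco_b

    @deco_a
    def fetch_block() -> None:
        raise TimeoutError("node did not answer")

    try:
        fetch_block()
    except RPCError:
        pass  # the TimeoutError was re-raised as RPCError by the wrapper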
@@ -2,5 +2,5 @@ def int_to_big_endian(value: int) -> bytes:
      return value.to_bytes((value.bit_length() + 7) // 8 or 1, "big")


- def big_endian_to_int(value: bytes) -> int:
+ def big_endian_to_int(value: bytes | bytearray) -> int:
      return int.from_bytes(value, "big")
@@ -1,4 +1,11 @@
- class ValidationError(Exception):
+ """
+ faster-eth-utils exceptions always inherit from eth-utils exceptions, so porting to faster-eth-utils
+ does not require any change to your existing exception handlers. They will continue to work.
+ """
+
+ import eth_utils.exceptions
+
+ class ValidationError(eth_utils.exceptions.ValidationError):  # type: ignore[misc]
      """
      Raised when something does not pass a validation check.
      """
@@ -1,23 +1,23 @@
  import collections
  import functools
  import itertools
+ from collections.abc import Callable, Iterable, Mapping
  from typing import (  # noqa: F401
-     Any,
-     Callable,
      Dict,
-     Iterable,
      List,
-     Mapping,
      Set,
      Tuple,
      TypeVar,
      Union,
  )

+ from typing_extensions import ParamSpec
+
  from .toolz import (
      compose as _compose,
  )

+ P = ParamSpec("P")
  T = TypeVar("T")


@@ -33,41 +33,53 @@ TFOut = TypeVar("TFOut")
  def combine(
      f: Callable[[TGOut], TFOut], g: Callable[[TGIn], TGOut]
  ) -> Callable[[TGIn], TFOut]:
-     return lambda x: f(g(x))
+     def combined(x: TGIn) -> TFOut:
+         return f(g(x))
+     return combined
+
+
+ TCb = TypeVar("TCb")


  def apply_to_return_value(
-     callback: Callable[..., T]
- ) -> Callable[..., Callable[..., T]]:
-     def outer(fn: Callable[..., T]) -> Callable[..., T]:
-         # We would need to type annotate *args and **kwargs but doing so segfaults
-         # the PyPy builds. We ignore instead.
+     callback: Callable[[T], TCb]
+ ) -> Callable[[Callable[P, T]], Callable[P, TCb]]:
+     def outer(fn: Callable[P, T]) -> Callable[P, TCb]:
          @functools.wraps(fn)
-         def inner(*args, **kwargs) -> T:  # type: ignore
+         def inner(*args: P.args, **kwargs: P.kwargs) -> TCb:
              return callback(fn(*args, **kwargs))
-
          return inner
-
      return outer


  TVal = TypeVar("TVal")
  TKey = TypeVar("TKey")
- to_tuple = apply_to_return_value(
-     tuple
- )  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Tuple[TVal, ...]]]  # noqa: E501
- to_list = apply_to_return_value(
-     list
- )  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., List[TVal]]]  # noqa: E501
- to_set = apply_to_return_value(
-     set
- )  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Set[TVal]]]  # noqa: E501
- to_dict = apply_to_return_value(
-     dict
- )  # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., Dict[TKey, TVal]]]  # noqa: E501
- to_ordered_dict = apply_to_return_value(
+
+ def to_tuple(fn: Callable[P, Iterable[TVal]]) -> Callable[P, tuple[TVal, ...]]:
+     def to_tuple_wrap(*args: P.args, **kwargs: P.kwargs) -> tuple[TVal, ...]:
+         return tuple(fn(*args, **kwargs))
+     return to_tuple_wrap
+
+ def to_list(fn: Callable[P, Iterable[TVal]]) -> Callable[P, list[TVal]]:
+     def to_list_wrap(*args: P.args, **kwargs: P.kwargs) -> list[TVal]:
+         return list(fn(*args, **kwargs))
+     return to_list_wrap
+
+ def to_set(fn: Callable[P, Iterable[TVal]]) -> Callable[P, set[TVal]]:
+     def to_set_wrap(*args: P.args, **kwargs: P.kwargs) -> set[TVal]:
+         return set(fn(*args, **kwargs))
+     return to_set_wrap
+
+ def to_dict(
+     fn: Callable[P, Mapping[TKey, TVal] | Iterable[tuple[TKey, TVal]]]
+ ) -> Callable[P, dict[TKey, TVal]]:
+     def to_dict_wrap(*args: P.args, **kwargs: P.kwargs) -> dict[TKey, TVal]:
+         return dict(fn(*args, **kwargs))
+     return to_dict_wrap
+
+ to_ordered_dict = apply_to_return_value(  # type: ignore [assignment]
      collections.OrderedDict
- )  # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., collections.OrderedDict[TKey, TVal]]]  # noqa: E501
+ )  # type: Callable[[Callable[P, Union[Mapping[TKey, TVal], Iterable[Tuple[TKey, TVal]]]]], Callable[P, collections.OrderedDict[TKey, TVal]]]  # noqa: E501
  sort_return = _compose(to_tuple, apply_to_return_value(sorted))
  flatten_return = _compose(
      to_tuple, apply_to_return_value(itertools.chain.from_iterable)
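
Reviewer note: although to_tuple, to_list, to_set and to_dict are now hand-written wrappers rather than apply_to_return_value(...) aliases, they are still used the same way as the eth-utils originals: as decorators that materialize a generator's output. A short usage sketch, assuming this hunk is faster_eth_utils/functional.py; fibs is an illustrative helper, not package code:

    from collections.abc import Iterable

    from faster_eth_utils.functional import to_tuple

    @to_tuple
    def fibs(n: int) -> Iterable[int]:
        # The decorator collects the yielded values into a tuple.
        a, b = 0, 1
        for _ in range(n):
            yield a
            a, b = b, a + b

    assert fibs(5) == (0, 1, 1, 2, 3)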
@@ -4,22 +4,18 @@ import binascii
  import re
  from typing import (
      Any,
-     AnyStr,
      Final,
-     Union,
+     TypeGuard,
  )

  from eth_typing import (
      HexStr,
  )
- from typing_extensions import (
-     TypeGuard,
- )

  _HEX_REGEXP_MATCH: Final = re.compile("(0[xX])?[0-9a-fA-F]*").fullmatch

- hexlify: Final = binascii.hexlify
- unhexlify: Final = binascii.unhexlify
+ _hexlify: Final = binascii.hexlify
+ _unhexlify: Final = binascii.unhexlify



@@ -29,11 +25,11 @@ def decode_hex(value: str) -> bytes:
      non_prefixed = remove_0x_prefix(HexStr(value))
      # unhexlify will only accept bytes type someday
      ascii_hex = non_prefixed.encode("ascii")
-     return unhexlify(ascii_hex)
+     return _unhexlify(ascii_hex)


- def encode_hex(value: AnyStr) -> HexStr:
-     ascii_bytes: Union[bytes, bytearray]
+ def encode_hex(value: str | bytes | bytearray) -> HexStr:
+     ascii_bytes: bytes | bytearray
      if isinstance(value, (bytes, bytearray)):
          ascii_bytes = value
      elif isinstance(value, str):
@@ -41,7 +37,7 @@ def encode_hex(value: AnyStr) -> HexStr:
      else:
          raise TypeError("Value must be an instance of str or unicode")

-     binary_hex = hexlify(ascii_bytes)
+     binary_hex = _hexlify(ascii_bytes)
      return add_0x_prefix(HexStr(binary_hex.decode("ascii")))


@@ -51,7 +47,7 @@ def is_0x_prefixed(value: str) -> bool:
      # raise TypeError(
      #     f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
      # )
-     return value.startswith(("0x", "0X"))
+     return value.startswith("0x") or value.startswith("0X")


  def remove_0x_prefix(value: HexStr) -> HexStr:
@@ -1,10 +1,10 @@
+ from collections.abc import (
+     Iterable,
+     Iterator,
+ )
  from typing import (
      Any,
      Final,
-     Iterable,
-     Iterator,
-     Tuple,
-     Union,
  )
  from urllib import (
      parse,
@@ -23,17 +23,14 @@ from faster_eth_utils.currency import (
  from . import toolz


- sliding_window: Final = toolz.sliding_window
- take: Final = toolz.take
-
-
- def humanize_seconds(seconds: Union[float, int]) -> str:
-     if int(seconds) == 0:
+ def humanize_seconds(seconds: float | int) -> str:
+     seconds_int = int(seconds)
+     if seconds_int == 0:
          return "0s"

-     unit_values = _consume_leading_zero_units(_humanize_seconds(int(seconds)))
+     unit_values = _consume_leading_zero_units(_humanize_seconds(seconds_int))

-     return "".join((f"{amount}{unit}" for amount, unit in take(3, unit_values)))
+     return "".join(f"{amount}{unit}" for amount, unit in toolz.take(3, unit_values))


  SECOND: Final = 1
@@ -57,8 +54,8 @@ UNITS: Final = (


  def _consume_leading_zero_units(
-     units_iter: Iterator[Tuple[int, str]]
- ) -> Iterator[Tuple[int, str]]:
+     units_iter: Iterator[tuple[int, str]]
+ ) -> Iterator[tuple[int, str]]:
      for amount, unit in units_iter:
          if amount == 0:
              continue
@@ -69,7 +66,7 @@ def _consume_leading_zero_units(
      yield from units_iter


- def _humanize_seconds(seconds: int) -> Iterator[Tuple[int, str]]:
+ def _humanize_seconds(seconds: int) -> Iterator[tuple[int, str]]:
      remainder = seconds

      for duration, unit in UNITS:
@@ -144,9 +141,9 @@ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
          return False


- def _find_breakpoints(values: Tuple[int, ...]) -> Iterator[int]:
+ def _find_breakpoints(values: tuple[int, ...]) -> Iterator[int]:
      yield 0
-     for index, (left, right) in enumerate(sliding_window(2, values), 1):
+     for index, (left, right) in enumerate(toolz.sliding_window(2, values), 1):
          if left + 1 == right:
              continue
          else:
@@ -154,7 +151,7 @@ def _find_breakpoints(values: Tuple[int, ...]) -> Iterator[int]:
      yield len(values)


- def _extract_integer_ranges(values: Tuple[int, ...]) -> Iterator[Tuple[int, int]]:
+ def _extract_integer_ranges(values: tuple[int, ...]) -> Iterator[tuple[int, int]]:
      """
      Return a tuple of consecutive ranges of integers.

@@ -164,12 +161,12 @@ def _extract_integer_ranges(values: Tuple[int, ...]) -> Iterator[Tuple[int, int]
      - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
      - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
      """
-     for left, right in sliding_window(2, _find_breakpoints(values)):
+     for left, right in toolz.sliding_window(2, _find_breakpoints(values)):
          chunk = values[left:right]
          yield chunk[0], chunk[-1]


- def _humanize_range(bounds: Tuple[int, int]) -> str:
+ def _humanize_range(bounds: tuple[int, int]) -> str:
      left, right = bounds
      if left == right:
          return str(left)
@@ -201,5 +198,4 @@ def humanize_wei(number: int) -> str:
      else:
          unit = "wei"
      amount = from_wei(number, unit)
-     x = f"{str(amount)} {unit}"
-     return x
+     return f"{str(amount)} {unit}"
@@ -1,17 +1,19 @@
- import contextlib
+ import logging
+ from collections.abc import (
+     Iterator,
+ )
+ from contextlib import (
+     contextmanager,
+ )
  from functools import (
      cached_property,
  )
- import logging
  from typing import (
      Any,
-     Dict,
-     Iterator,
-     Tuple,
-     Type,
+     Final,
      TypeVar,
-     Union,
      cast,
+     overload,
  )

  from .toolz import (
@@ -22,6 +24,11 @@ DEBUG2_LEVEL_NUM = 8

  TLogger = TypeVar("TLogger", bound=logging.Logger)

+ Logger: Final = logging.Logger
+ getLogger: Final = logging.getLogger
+ getLoggerClass: Final = logging.getLoggerClass
+ setLoggerClass: Final = logging.setLoggerClass
+

  class ExtendedDebugLogger(logging.Logger):
      """
@@ -41,7 +48,7 @@ class ExtendedDebugLogger(logging.Logger):
          # lambda to further speed up
          self.__dict__["debug2"] = lambda message, *args, **kwargs: None

-     def __reduce__(self) -> Tuple[Any, ...]:
+     def __reduce__(self) -> tuple[Any, ...]:
          # This is needed because our parent's implementation could
          # cause us to become a regular Logger on unpickling.
          return get_extended_debug_logger, (self.name,)
@@ -53,37 +60,38 @@ def setup_DEBUG2_logging() -> None:
      """
      if not hasattr(logging, "DEBUG2"):
          logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
-         logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore
+         logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]

-
- @contextlib.contextmanager
- def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
-     original_logger_class = logging.getLoggerClass()
-     logging.setLoggerClass(logger_class)
+ @contextmanager
+ def _use_logger_class(logger_class: type[logging.Logger]) -> Iterator[None]:
+     original_logger_class = getLoggerClass()
+     setLoggerClass(logger_class)
      try:
          yield
      finally:
-         logging.setLoggerClass(original_logger_class)
+         setLoggerClass(original_logger_class)


- def get_logger(name: str, logger_class: Union[Type[TLogger], None] = None) -> TLogger:
+ @overload
+ def get_logger(name: str, logger_class: type[TLogger]) -> TLogger: ...
+ @overload
+ def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
+ def get_logger(name: str, logger_class: type[TLogger] | None = None) -> TLogger | logging.Logger:
      if logger_class is None:
-         return cast(TLogger, logging.getLogger(name))
-     else:
-         with _use_logger_class(logger_class):
-             # The logging module caches logger instances. The following code
-             # ensures that if there is a cached instance that we don't
-             # accidentally return the incorrect logger type because the logging
-             # module does not *update* the cached instance in the event that
-             # the global logging class changes.
-             #
-             # types ignored b/c mypy doesn't identify presence of
-             # manager on logging.Logger
-             manager = logging.Logger.manager
-             if name in manager.loggerDict:
-                 if type(manager.loggerDict[name]) is not logger_class:
-                     del manager.loggerDict[name]
-             return cast(TLogger, logging.getLogger(name))
+         return getLogger(name)
+
+     with _use_logger_class(logger_class):
+         # The logging module caches logger instances. The following code
+         # ensures that if there is a cached instance that we don't
+         # accidentally return the incorrect logger type because the logging
+         # module does not *update* the cached instance in the event that
+         # the global logging class changes.
+         manager = Logger.manager
+         logger_dict = manager.loggerDict
+         cached_logger = logger_dict.get(name)
+         if cached_logger is not None and type(cached_logger) is not logger_class:
+             del logger_dict[name]
+         return cast(TLogger, getLogger(name))


  def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
@@ -101,13 +109,13 @@ class HasLoggerMeta(type):
      to use when creating the associated logger for a given class.
      """

-     logger_class = logging.Logger
+     logger_class = Logger

      def __new__(
-         mcls: Type[THasLoggerMeta],
+         mcls: type[THasLoggerMeta],
          name: str,
-         bases: Tuple[Type[Any]],
-         namespace: Dict[str, Any],
+         bases: tuple[type[Any]],
+         namespace: dict[str, Any],
      ) -> THasLoggerMeta:
          if "logger" in namespace:
              # If a logger was explicitly declared we shouldn't do anything to
@@ -115,22 +123,21 @@ class HasLoggerMeta(type):
              return super().__new__(mcls, name, bases, namespace)
          if "__qualname__" not in namespace:
              raise AttributeError("Missing __qualname__")
-
-         with _use_logger_class(mcls.logger_class):
-             logger = logging.getLogger(namespace["__qualname__"])
+
+         logger = get_logger(namespace["__qualname__"], mcls.logger_class)

          return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))

      @classmethod
      def replace_logger_class(
-         mcls: Type[THasLoggerMeta], value: Type[logging.Logger]
-     ) -> Type[THasLoggerMeta]:
+         mcls: type[THasLoggerMeta], value: type[logging.Logger]
+     ) -> type[THasLoggerMeta]:
          return type(mcls.__name__, (mcls,), {"logger_class": value})

      @classmethod
      def meta_compat(
-         mcls: Type[THasLoggerMeta], other: Type[type]
-     ) -> Type[THasLoggerMeta]:
+         mcls: type[THasLoggerMeta], other: type[type]
+     ) -> type[THasLoggerMeta]:
          return type(mcls.__name__, (mcls, other), {})


@@ -141,5 +148,5 @@ class HasLogger(metaclass=HasLoggerMeta):
  HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)


- class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore
+ class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore[metaclass]
      logger: ExtendedDebugLogger
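
Reviewer note: the @overload pair added to get_logger above narrows the return type: get_logger(name) is typed as logging.Logger, while passing a logger class is typed as (and, via _use_logger_class, returns) an instance of that class. A short sketch of the intended call sites, assuming this hunk is faster_eth_utils/logging.py:

    import logging

    from faster_eth_utils.logging import ExtendedDebugLogger, get_logger

    plain = get_logger("my.app")                               # typed as logging.Logger
    verbose = get_logger("my.app.debug", ExtendedDebugLogger)  # typed as ExtendedDebugLogger

    assert isinstance(plain, logging.Logger)
    assert isinstance(verbose, ExtendedDebugLogger)

    # ExtendedDebugLogger adds a debug2() channel at the custom DEBUG2 level (8);
    # per the hunk above, it degrades to a no-op lambda when that level is disabled.
    verbose.debug2("verbose trace message")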
@@ -3,19 +3,26 @@ from dataclasses import (
  )
  import json
  import os
+ import sys
+ from pathlib import (
+     Path,
+ )
  from typing import (
-     List,
+     Final,
  )

  from eth_typing import (
      ChainId,
  )

- from faster_eth_utils import (
+ from faster_eth_utils.exceptions import (
      ValidationError,
  )


+ FASTER_ETH_UTILS_FOLDER: Final = Path(sys.modules["faster_eth_utils"].__file__).parent  # type: ignore [arg-type]
+
+
  @dataclass
  class Network:
      chain_id: int
@@ -24,11 +31,11 @@ class Network:
      symbol: ChainId


- def initialize_network_objects() -> List[Network]:
+ def initialize_network_objects() -> list[Network]:
      networks_obj = []

      networks_json_path = os.path.abspath(
-         os.path.join(os.path.dirname(__file__), "__json")
+         os.path.join(str(FASTER_ETH_UTILS_FOLDER), "__json")
      )
      with open(
          os.path.join(networks_json_path, "eth_networks.json"),