faster-eth-utils 2.3.1-cp311-cp311-win32.whl → 5.3.23-cp311-cp311-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of faster-eth-utils might be problematic.

Files changed (54)
  1. faster_eth_utils/__init__.py +54 -17
  2. faster_eth_utils/__json/eth_networks.json +1 -0
  3. faster_eth_utils/__main__.py +3 -1
  4. faster_eth_utils/abi.cp311-win32.pyd +0 -0
  5. faster_eth_utils/abi.py +840 -37
  6. faster_eth_utils/address.cp311-win32.pyd +0 -0
  7. faster_eth_utils/address.py +54 -66
  8. faster_eth_utils/applicators.cp311-win32.pyd +0 -0
  9. faster_eth_utils/applicators.py +126 -71
  10. faster_eth_utils/conversions.cp311-win32.pyd +0 -0
  11. faster_eth_utils/conversions.py +57 -30
  12. faster_eth_utils/crypto.cp311-win32.pyd +0 -0
  13. faster_eth_utils/crypto.py +11 -6
  14. faster_eth_utils/currency.cp311-win32.pyd +0 -0
  15. faster_eth_utils/currency.py +74 -33
  16. faster_eth_utils/curried/__init__.py +110 -89
  17. faster_eth_utils/debug.cp311-win32.pyd +0 -0
  18. faster_eth_utils/debug.py +3 -3
  19. faster_eth_utils/decorators.cp311-win32.pyd +0 -0
  20. faster_eth_utils/decorators.py +73 -24
  21. faster_eth_utils/encoding.cp311-win32.pyd +0 -0
  22. faster_eth_utils/encoding.py +1 -1
  23. faster_eth_utils/exceptions.cp311-win32.pyd +0 -0
  24. faster_eth_utils/exceptions.py +8 -3
  25. faster_eth_utils/functional.cp311-win32.pyd +0 -0
  26. faster_eth_utils/functional.py +42 -28
  27. faster_eth_utils/hexadecimal.cp311-win32.pyd +0 -0
  28. faster_eth_utils/hexadecimal.py +34 -26
  29. faster_eth_utils/humanize.cp311-win32.pyd +0 -0
  30. faster_eth_utils/humanize.py +55 -27
  31. faster_eth_utils/logging.py +65 -64
  32. faster_eth_utils/module_loading.cp311-win32.pyd +0 -0
  33. faster_eth_utils/module_loading.py +8 -7
  34. faster_eth_utils/network.cp311-win32.pyd +0 -0
  35. faster_eth_utils/network.py +25 -14
  36. faster_eth_utils/numeric.cp311-win32.pyd +0 -0
  37. faster_eth_utils/numeric.py +11 -4
  38. faster_eth_utils/pydantic.py +99 -0
  39. faster_eth_utils/toolz.cp311-win32.pyd +0 -0
  40. faster_eth_utils/toolz.py +82 -152
  41. faster_eth_utils/types.cp311-win32.pyd +0 -0
  42. faster_eth_utils/types.py +34 -21
  43. faster_eth_utils/typing/misc.py +3 -1
  44. faster_eth_utils/units.cp311-win32.pyd +0 -0
  45. faster_eth_utils-5.3.23.dist-info/METADATA +192 -0
  46. faster_eth_utils-5.3.23.dist-info/RECORD +53 -0
  47. {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.23.dist-info}/licenses/LICENSE +1 -1
  48. faster_eth_utils-5.3.23.dist-info/top_level.txt +3 -0
  49. faster_eth_utils__mypyc.cp311-win32.pyd +0 -0
  50. bce0bfc64ce5e845ec4a__mypyc.cp311-win32.pyd +0 -0
  51. faster_eth_utils-2.3.1.dist-info/METADATA +0 -160
  52. faster_eth_utils-2.3.1.dist-info/RECORD +0 -45
  53. faster_eth_utils-2.3.1.dist-info/top_level.txt +0 -3
  54. {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.23.dist-info}/WHEEL +0 -0
faster_eth_utils/exceptions.py
@@ -1,6 +1,11 @@
-class ValidationError(Exception):
+"""
+faster-eth-utils exceptions always inherit from eth-utils exceptions, so porting to faster-eth-utils
+does not require any change to your existing exception handlers. They will continue to work.
+"""
+
+import eth_utils.exceptions
+
+class ValidationError(eth_utils.exceptions.ValidationError):  # type: ignore[misc]
     """
     Raised when something does not pass a validation check.
     """
-
-    pass
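
The new module docstring above makes a compatibility promise: the faster-eth-utils exception is a subclass of the eth-utils one. A minimal sketch of what that means for downstream code, assuming both packages are installed under their usual import names:

    import eth_utils.exceptions
    import faster_eth_utils.exceptions

    try:
        # an error raised by the faster implementation...
        raise faster_eth_utils.exceptions.ValidationError("bad input")
    except eth_utils.exceptions.ValidationError as err:
        # ...is still caught by a handler written against eth-utils
        print(f"caught: {err}")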
faster_eth_utils/functional.py
@@ -1,21 +1,23 @@
 import collections
 import functools
 import itertools
+from collections.abc import Callable, Iterable, Mapping
 from typing import (  # noqa: F401
-    Any,
-    Callable,
     Dict,
-    Iterable,
     List,
-    Mapping,
     Set,
     Tuple,
     TypeVar,
     Union,
 )
 
-from .toolz import compose as _compose
+from typing_extensions import ParamSpec
 
+from .toolz import (
+    compose as _compose,
+)
+
+P = ParamSpec("P")
 T = TypeVar("T")
 
 
@@ -31,41 +33,53 @@ TFOut = TypeVar("TFOut")
 def combine(
     f: Callable[[TGOut], TFOut], g: Callable[[TGIn], TGOut]
 ) -> Callable[[TGIn], TFOut]:
-    return lambda x: f(g(x))
+    def combined(x: TGIn) -> TFOut:
+        return f(g(x))
+    return combined
+
+
+TCb = TypeVar("TCb")
 
 
 def apply_to_return_value(
-    callback: Callable[..., T]
-) -> Callable[..., Callable[..., T]]:
-    def outer(fn: Callable[..., T]) -> Callable[..., T]:
-        # We would need to type annotate *args and **kwargs but doing so segfaults
-        # the PyPy builds. We ignore instead.
+    callback: Callable[[T], TCb]
+) -> Callable[[Callable[P, T]], Callable[P, TCb]]:
+    def outer(fn: Callable[P, T]) -> Callable[P, TCb]:
         @functools.wraps(fn)
-        def inner(*args, **kwargs) -> T:  # type: ignore
+        def inner(*args: P.args, **kwargs: P.kwargs) -> TCb:
             return callback(fn(*args, **kwargs))
-
         return inner
-
     return outer
 
 
 TVal = TypeVar("TVal")
 TKey = TypeVar("TKey")
-to_tuple = apply_to_return_value(
-    tuple
-)  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Tuple[TVal, ...]]]  # noqa: E501
-to_list = apply_to_return_value(
-    list
-)  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., List[TVal]]]  # noqa: E501
-to_set = apply_to_return_value(
-    set
-)  # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Set[TVal]]]  # noqa: E501
-to_dict = apply_to_return_value(
-    dict
-)  # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., Dict[TKey, TVal]]]  # noqa: E501
-to_ordered_dict = apply_to_return_value(
+
+def to_tuple(fn: Callable[P, Iterable[TVal]]) -> Callable[P, tuple[TVal, ...]]:
+    def to_tuple_wrap(*args: P.args, **kwargs: P.kwargs) -> tuple[TVal, ...]:
+        return tuple(fn(*args, **kwargs))
+    return to_tuple_wrap
+
+def to_list(fn: Callable[P, Iterable[TVal]]) -> Callable[P, list[TVal]]:
+    def to_list_wrap(*args: P.args, **kwargs: P.kwargs) -> list[TVal]:
+        return list(fn(*args, **kwargs))
+    return to_list_wrap
+
+def to_set(fn: Callable[P, Iterable[TVal]]) -> Callable[P, set[TVal]]:
+    def to_set_wrap(*args: P.args, **kwargs: P.kwargs) -> set[TVal]:
+        return set(fn(*args, **kwargs))
+    return to_set_wrap
+
+def to_dict(
+    fn: Callable[P, Mapping[TKey, TVal] | Iterable[tuple[TKey, TVal]]]
+) -> Callable[P, dict[TKey, TVal]]:
+    def to_dict_wrap(*args: P.args, **kwargs: P.kwargs) -> dict[TKey, TVal]:
+        return dict(fn(*args, **kwargs))
+    return to_dict_wrap
+
+to_ordered_dict = apply_to_return_value(  # type: ignore [assignment]
     collections.OrderedDict
-)  # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., collections.OrderedDict[TKey, TVal]]]  # noqa: E501
+)  # type: Callable[[Callable[P, Union[Mapping[TKey, TVal], Iterable[Tuple[TKey, TVal]]]]], Callable[P, collections.OrderedDict[TKey, TVal]]]  # noqa: E501
 sort_return = _compose(to_tuple, apply_to_return_value(sorted))
 flatten_return = _compose(
     to_tuple, apply_to_return_value(itertools.chain.from_iterable)
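
The to_tuple, to_list, to_set, and to_dict helpers are now hand-rolled decorators typed with ParamSpec, so a wrapped function keeps its argument signature for type checkers. A usage sketch, assuming these names remain exported from faster_eth_utils.functional as they are in eth-utils:

    from faster_eth_utils.functional import to_dict, to_tuple


    @to_tuple
    def squares(n: int):
        # generator output is materialized into a tuple by the decorator
        for i in range(n):
            yield i * i


    @to_dict
    def square_map(n: int):
        # (key, value) pairs are collected into a dict
        for i in range(n):
            yield i, i * i


    assert squares(4) == (0, 1, 4, 9)
    assert square_map(3) == {0: 0, 1: 1, 2: 4}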
faster_eth_utils/hexadecimal.py
@@ -2,42 +2,52 @@
 
 import binascii
 import re
-from typing import Any, AnyStr, Final
+from typing import (
+    Any,
+    Final,
+    TypeGuard,
+)
 
-from eth_typing import HexStr
+from eth_typing import (
+    HexStr,
+)
 
-from .types import is_string, is_text
+_HEX_REGEXP_MATCH: Final = re.compile("(0[xX])?[0-9a-fA-F]*").fullmatch
+
+_hexlify: Final = binascii.hexlify
+_unhexlify: Final = binascii.unhexlify
 
-_HEX_REGEXP: Final = re.compile("(0[xX])?[0-9a-fA-F]*")
 
 
 def decode_hex(value: str) -> bytes:
-    if not is_text(value):
+    if not isinstance(value, str):
         raise TypeError("Value must be an instance of str")
     non_prefixed = remove_0x_prefix(HexStr(value))
     # unhexlify will only accept bytes type someday
     ascii_hex = non_prefixed.encode("ascii")
-    return binascii.unhexlify(ascii_hex)
+    return _unhexlify(ascii_hex)
 
 
-def encode_hex(value: AnyStr) -> HexStr:
-    if not is_string(value):
-        raise TypeError("Value must be an instance of str or unicode")
-    elif isinstance(value, (bytes, bytearray)):
+def encode_hex(value: str | bytes | bytearray) -> HexStr:
+    ascii_bytes: bytes | bytearray
+    if isinstance(value, (bytes, bytearray)):
         ascii_bytes = value
-    else:
+    elif isinstance(value, str):
         ascii_bytes = value.encode("ascii")
+    else:
+        raise TypeError("Value must be an instance of str or unicode")
 
-    binary_hex = binascii.hexlify(ascii_bytes)
+    binary_hex = _hexlify(ascii_bytes)
     return add_0x_prefix(HexStr(binary_hex.decode("ascii")))
 
 
 def is_0x_prefixed(value: str) -> bool:
-    if not is_text(value):
-        raise TypeError(
-            "is_0x_prefixed requires text typed arguments. Got: {0}".format(repr(value))
-        )
-    return value.startswith(("0x", "0X"))
+    # this check is not needed in the compiled version
+    # if not isinstance(value, str):
+    #     raise TypeError(
+    #         f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
+    #     )
+    return value.startswith("0x") or value.startswith("0X")
 
 
 def remove_0x_prefix(value: HexStr) -> HexStr:
@@ -52,17 +62,15 @@ def add_0x_prefix(value: HexStr) -> HexStr:
     return HexStr("0x" + value)
 
 
-def is_hexstr(value: Any) -> bool:
-    if not is_text(value) or not value:
+def is_hexstr(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str) or not value:
         return False
-    return _HEX_REGEXP.fullmatch(value) is not None
+    return _HEX_REGEXP_MATCH(value) is not None
 
 
-def is_hex(value: Any) -> bool:
-    if not is_text(value):
-        raise TypeError(
-            "is_hex requires text typed arguments. Got: {0}".format(repr(value))
-        )
+def is_hex(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str):
+        raise TypeError(f"is_hex requires text typed arguments. Got: {repr(value)}")
     if not value:
         return False
-    return _HEX_REGEXP.fullmatch(value) is not None
+    return _HEX_REGEXP_MATCH(value) is not None
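
is_hexstr and is_hex now return TypeGuard[HexStr], and encode_hex accepts str, bytes, or bytearray directly instead of going through the old is_string/is_text helpers. A quick sketch of the round trip, with public names assumed unchanged from eth-utils:

    from faster_eth_utils.hexadecimal import decode_hex, encode_hex, is_hexstr

    payload = encode_hex(b"\xde\xad\xbe\xef")  # '0xdeadbeef'
    assert is_hexstr(payload)                  # also narrows payload to HexStr for type checkers
    assert decode_hex(payload) == b"\xde\xad\xbe\xef"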
Binary file
faster_eth_utils/humanize.py
@@ -1,22 +1,36 @@
-from typing import Any, Final, Iterable, Iterator, Tuple, Union
-from urllib import parse
+from collections.abc import (
+    Iterable,
+    Iterator,
+)
+from typing import (
+    Any,
+    Final,
+)
+from urllib import (
+    parse,
+)
 
-from eth_typing import URI, Hash32
+from eth_typing import (
+    URI,
+    Hash32,
+)
 
-from faster_eth_utils.currency import denoms, from_wei
+from faster_eth_utils.currency import (
+    denoms,
+    from_wei,
+)
 
-from .toolz import sliding_window, take
+from . import toolz
 
 
-def humanize_seconds(seconds: Union[float, int]) -> str:
-    if int(seconds) == 0:
+def humanize_seconds(seconds: float | int) -> str:
+    seconds_int = int(seconds)
+    if seconds_int == 0:
         return "0s"
 
-    unit_values = _consume_leading_zero_units(_humanize_seconds(int(seconds)))
+    unit_values = _consume_leading_zero_units(_humanize_seconds(seconds_int))
 
-    return "".join(
-        ("{0}{1}".format(amount, unit) for amount, unit in take(3, unit_values))
-    )
+    return "".join(f"{amount}{unit}" for amount, unit in toolz.take(3, unit_values))
 
 
 SECOND: Final = 1
@@ -40,8 +54,8 @@ UNITS: Final = (
 
 
 def _consume_leading_zero_units(
-    units_iter: Iterator[Tuple[int, str]]
-) -> Iterator[Tuple[int, str]]:
+    units_iter: Iterator[tuple[int, str]]
+) -> Iterator[tuple[int, str]]:
     for amount, unit in units_iter:
         if amount == 0:
             continue
@@ -52,7 +66,7 @@ def _consume_leading_zero_units(
     yield from units_iter
 
 
-def _humanize_seconds(seconds: int) -> Iterator[Tuple[int, str]]:
+def _humanize_seconds(seconds: int) -> Iterator[tuple[int, str]]:
     remainder = seconds
 
     for duration, unit in UNITS:
@@ -74,7 +88,22 @@ def humanize_bytes(value: bytes) -> str:
     value_as_hex = value.hex()
     head = value_as_hex[:DISPLAY_HASH_CHARS]
     tail = value_as_hex[-1 * DISPLAY_HASH_CHARS :]
-    return "{0}..{1}".format(head, tail)
+    return f"{head}..{tail}"
+
+
+def humanize_hexstr(value: str) -> str:
+    tail = value[-1 * DISPLAY_HASH_CHARS :]
+
+    if value[:2] == "0x":
+        if len(value[2:]) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[2 : DISPLAY_HASH_CHARS + 2]
+        return f"0x{head}..{tail}"
+    else:
+        if len(value) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[:DISPLAY_HASH_CHARS]
+        return f"{head}..{tail}"
 
 
 def humanize_hash(value: Hash32) -> str:
@@ -84,15 +113,15 @@ def humanize_hash(value: Hash32) -> str:
 def humanize_ipfs_uri(uri: URI) -> str:
     if not is_ipfs_uri(uri):
         raise TypeError(
-            "%s does not look like a valid IPFS uri. Currently, "
-            "only CIDv0 hash schemes are supported." % uri
+            f"{uri} does not look like a valid IPFS uri. Currently, "
+            "only CIDv0 hash schemes are supported."
         )
 
     parsed = parse.urlparse(uri)
     ipfs_hash = parsed.netloc
     head = ipfs_hash[:DISPLAY_HASH_CHARS]
     tail = ipfs_hash[-1 * DISPLAY_HASH_CHARS :]
-    return "ipfs://{0}..{1}".format(head, tail)
+    return f"ipfs://{head}..{tail}"
 
 
 def is_ipfs_uri(value: Any) -> bool:
@@ -112,9 +141,9 @@ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
     return False
 
 
-def _find_breakpoints(*values: int) -> Iterator[int]:
+def _find_breakpoints(values: tuple[int, ...]) -> Iterator[int]:
     yield 0
-    for index, (left, right) in enumerate(sliding_window(2, values), 1):
+    for index, (left, right) in enumerate(toolz.sliding_window(2, values), 1):
         if left + 1 == right:
             continue
         else:
@@ -122,7 +151,7 @@ def _find_breakpoints(*values: int) -> Iterator[int]:
     yield len(values)
 
 
-def _extract_integer_ranges(*values: int) -> Iterator[Tuple[int, int]]:
+def _extract_integer_ranges(values: tuple[int, ...]) -> Iterator[tuple[int, int]]:
     """
     Return a tuple of consecutive ranges of integers.
 
@@ -132,17 +161,17 @@ def _extract_integer_ranges(*values: int) -> Iterator[Tuple[int, int]]:
     - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
     - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
     """
-    for left, right in sliding_window(2, _find_breakpoints(*values)):
+    for left, right in toolz.sliding_window(2, _find_breakpoints(values)):
         chunk = values[left:right]
         yield chunk[0], chunk[-1]
 
 
-def _humanize_range(bounds: Tuple[int, int]) -> str:
+def _humanize_range(bounds: tuple[int, int]) -> str:
     left, right = bounds
     if left == right:
         return str(left)
     else:
-        return "{left}-{right}".format(left=left, right=right)
+        return f"{left}-{right}"
 
 
 def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
@@ -158,7 +187,7 @@ def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
     if not values:
         return "(empty)"
     else:
-        return "|".join(map(_humanize_range, _extract_integer_ranges(*values)))
+        return "|".join(_humanize_range(range) for range in _extract_integer_ranges(values))
 
 
 def humanize_wei(number: int) -> str:
@@ -169,5 +198,4 @@ def humanize_wei(number: int) -> str:
     else:
         unit = "wei"
     amount = from_wei(number, unit)
-    x = f"{str(amount)} {unit}"
-    return x
+    return f"{str(amount)} {unit}"
faster_eth_utils/logging.py
@@ -1,19 +1,34 @@
-import contextlib
 import logging
-import sys
-from typing import Any, Dict, Iterator, Tuple, Type, TypeVar, cast
-
-from .toolz import assoc
-
-if sys.version_info < (3, 8):
-    from cached_property import cached_property
-else:
-    from functools import cached_property
+from collections.abc import (
+    Iterator,
+)
+from contextlib import (
+    contextmanager,
+)
+from functools import (
+    cached_property,
+)
+from typing import (
+    Any,
+    Final,
+    TypeVar,
+    cast,
+    overload,
+)
+
+from .toolz import (
+    assoc,
+)
 
 DEBUG2_LEVEL_NUM = 8
 
 TLogger = TypeVar("TLogger", bound=logging.Logger)
 
+Logger: Final = logging.Logger
+getLogger: Final = logging.getLogger
+getLoggerClass: Final = logging.getLoggerClass
+setLoggerClass: Final = logging.setLoggerClass
+
 
 class ExtendedDebugLogger(logging.Logger):
     """
@@ -33,7 +48,7 @@ class ExtendedDebugLogger(logging.Logger):
         # lambda to further speed up
         self.__dict__["debug2"] = lambda message, *args, **kwargs: None
 
-    def __reduce__(self) -> Tuple[Any, ...]:
+    def __reduce__(self) -> tuple[Any, ...]:
         # This is needed because our parent's implementation could
         # cause us to become a regular Logger on unpickling.
         return get_extended_debug_logger, (self.name,)
@@ -45,37 +60,38 @@ def setup_DEBUG2_logging() -> None:
     """
     if not hasattr(logging, "DEBUG2"):
         logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
-        setattr(logging, "DEBUG2", DEBUG2_LEVEL_NUM)  # typing: ignore
+        logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]
 
-
-@contextlib.contextmanager
-def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
-    original_logger_class = logging.getLoggerClass()
-    logging.setLoggerClass(logger_class)
+@contextmanager
+def _use_logger_class(logger_class: type[logging.Logger]) -> Iterator[None]:
+    original_logger_class = getLoggerClass()
+    setLoggerClass(logger_class)
     try:
         yield
     finally:
-        logging.setLoggerClass(original_logger_class)
+        setLoggerClass(original_logger_class)
 
 
-def get_logger(name: str, logger_class: Type[TLogger] = None) -> TLogger:
+@overload
+def get_logger(name: str, logger_class: type[TLogger]) -> TLogger: ...
+@overload
+def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
+def get_logger(name: str, logger_class: type[TLogger] | None = None) -> TLogger | logging.Logger:
     if logger_class is None:
-        return cast(TLogger, logging.getLogger(name))
-    else:
-        with _use_logger_class(logger_class):
-            # The logging module caches logger instances. The following code
-            # ensures that if there is a cached instance that we don't
-            # accidentally return the incorrect logger type because the logging
-            # module does not *update* the cached instance in the event that
-            # the global logging class changes.
-            #
-            # types ignored b/c mypy doesn't identify presence of
-            # manager on logging.Logger
-            manager = logging.Logger.manager
-            if name in manager.loggerDict:
-                if type(manager.loggerDict[name]) is not logger_class:
-                    del manager.loggerDict[name]
-            return cast(TLogger, logging.getLogger(name))
+        return getLogger(name)
+
+    with _use_logger_class(logger_class):
+        # The logging module caches logger instances. The following code
+        # ensures that if there is a cached instance that we don't
+        # accidentally return the incorrect logger type because the logging
+        # module does not *update* the cached instance in the event that
+        # the global logging class changes.
+        manager = Logger.manager
+        logger_dict = manager.loggerDict
+        cached_logger = logger_dict.get(name)
+        if cached_logger is not None and type(cached_logger) is not logger_class:
+            del logger_dict[name]
+        return cast(TLogger, getLogger(name))
 
 
 def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
@@ -93,13 +109,13 @@ class HasLoggerMeta(type):
     to use when creating the associated logger for a given class.
     """
 
-    logger_class = logging.Logger
+    logger_class = Logger
 
     def __new__(
-        mcls: Type[THasLoggerMeta],
+        mcls: type[THasLoggerMeta],
         name: str,
-        bases: Tuple[Type[Any]],
-        namespace: Dict[str, Any],
+        bases: tuple[type[Any]],
+        namespace: dict[str, Any],
     ) -> THasLoggerMeta:
         if "logger" in namespace:
             # If a logger was explicitly declared we shouldn't do anything to
@@ -107,45 +123,30 @@ class HasLoggerMeta(type):
             return super().__new__(mcls, name, bases, namespace)
         if "__qualname__" not in namespace:
            raise AttributeError("Missing __qualname__")
-
-        with _use_logger_class(mcls.logger_class):
-            logger = logging.getLogger(namespace["__qualname__"])
+
+        logger = get_logger(namespace["__qualname__"], mcls.logger_class)
 
         return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))
 
     @classmethod
     def replace_logger_class(
-        mcls: Type[THasLoggerMeta], value: Type[logging.Logger]
-    ) -> Type[THasLoggerMeta]:
+        mcls: type[THasLoggerMeta], value: type[logging.Logger]
+    ) -> type[THasLoggerMeta]:
         return type(mcls.__name__, (mcls,), {"logger_class": value})
 
     @classmethod
     def meta_compat(
-        mcls: Type[THasLoggerMeta], other: Type[type]
-    ) -> Type[THasLoggerMeta]:
+        mcls: type[THasLoggerMeta], other: type[type]
+    ) -> type[THasLoggerMeta]:
         return type(mcls.__name__, (mcls, other), {})
 
 
-class _BaseHasLogger(metaclass=HasLoggerMeta):
-    # This class exists to a allow us to define the type of the logger. Once
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = logging.Logger("")  # type: logging.Logger
-
-
-class HasLogger(_BaseHasLogger):
-    pass
+class HasLogger(metaclass=HasLoggerMeta):
+    logger: logging.Logger
 
 
 HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)
 
 
-class _BaseHasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore
-    # This class exists to a allow us to define the type of the logger. Once
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = ExtendedDebugLogger("")  # type: ExtendedDebugLogger
-
-
-class HasExtendedDebugLogger(_BaseHasExtendedDebugLogger):
-    pass
+class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore[metaclass]
    logger: ExtendedDebugLogger
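
get_logger is now overloaded so that calls without a logger_class are typed as returning a plain logging.Logger, and HasLogger/HasExtendedDebugLogger declare their logger attribute with a bare annotation instead of the old _Base* helper classes. A short usage sketch, assuming the public API matches eth-utils:

    import logging

    from faster_eth_utils.logging import (
        HasLogger,
        get_extended_debug_logger,
        setup_DEBUG2_logging,
    )

    setup_DEBUG2_logging()  # registers the DEBUG2 level (8) with the logging module
    worker_logger = get_extended_debug_logger("example.worker")
    worker_logger.debug2("only emitted when DEBUG2 (level 8) is enabled")


    class Worker(HasLogger):
        # HasLoggerMeta attaches a logger named after the class's __qualname__
        def run(self) -> None:
            self.logger.info("running")


    logging.basicConfig(level=logging.INFO)
    Worker().run()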
faster_eth_utils/module_loading.py
@@ -1,5 +1,9 @@
-from importlib import import_module
-from typing import Any
+from importlib import (
+    import_module,
+)
+from typing import (
+    Any,
+)
 
 
 def import_string(dotted_path: str) -> Any:
@@ -15,7 +19,7 @@ def import_string(dotted_path: str) -> Any:
     try:
         module_path, class_name = dotted_path.rsplit(".", 1)
     except ValueError:
-        msg = "%s doesn't look like a module path" % dotted_path
+        msg = f"{dotted_path} doesn't look like a module path"
         raise ImportError(msg)
 
     module = import_module(module_path)
@@ -23,8 +27,5 @@ def import_string(dotted_path: str) -> Any:
     try:
         return getattr(module, class_name)
     except AttributeError:
-        msg = 'Module "%s" does not define a "%s" attribute/class' % (
-            module_path,
-            class_name,
-        )
+        msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
         raise ImportError(msg)
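
import_string itself is unchanged apart from the f-string error messages; it still resolves a dotted path and re-raises lookup failures as ImportError. For reference:

    from faster_eth_utils.module_loading import import_string

    OrderedDict = import_string("collections.OrderedDict")

    try:
        import_string("collections.DoesNotExist")  # hypothetical missing attribute
    except ImportError as exc:
        print(exc)  # 'Module "collections" does not define a "DoesNotExist" attribute/class'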
Binary file