faster-eth-utils 2.3.1-cp311-cp311-win32.whl → 5.3.23-cp311-cp311-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of faster-eth-utils might be problematic.
- faster_eth_utils/__init__.py +54 -17
- faster_eth_utils/__json/eth_networks.json +1 -0
- faster_eth_utils/__main__.py +3 -1
- faster_eth_utils/abi.cp311-win32.pyd +0 -0
- faster_eth_utils/abi.py +840 -37
- faster_eth_utils/address.cp311-win32.pyd +0 -0
- faster_eth_utils/address.py +54 -66
- faster_eth_utils/applicators.cp311-win32.pyd +0 -0
- faster_eth_utils/applicators.py +126 -71
- faster_eth_utils/conversions.cp311-win32.pyd +0 -0
- faster_eth_utils/conversions.py +57 -30
- faster_eth_utils/crypto.cp311-win32.pyd +0 -0
- faster_eth_utils/crypto.py +11 -6
- faster_eth_utils/currency.cp311-win32.pyd +0 -0
- faster_eth_utils/currency.py +74 -33
- faster_eth_utils/curried/__init__.py +110 -89
- faster_eth_utils/debug.cp311-win32.pyd +0 -0
- faster_eth_utils/debug.py +3 -3
- faster_eth_utils/decorators.cp311-win32.pyd +0 -0
- faster_eth_utils/decorators.py +73 -24
- faster_eth_utils/encoding.cp311-win32.pyd +0 -0
- faster_eth_utils/encoding.py +1 -1
- faster_eth_utils/exceptions.cp311-win32.pyd +0 -0
- faster_eth_utils/exceptions.py +8 -3
- faster_eth_utils/functional.cp311-win32.pyd +0 -0
- faster_eth_utils/functional.py +42 -28
- faster_eth_utils/hexadecimal.cp311-win32.pyd +0 -0
- faster_eth_utils/hexadecimal.py +34 -26
- faster_eth_utils/humanize.cp311-win32.pyd +0 -0
- faster_eth_utils/humanize.py +55 -27
- faster_eth_utils/logging.py +65 -64
- faster_eth_utils/module_loading.cp311-win32.pyd +0 -0
- faster_eth_utils/module_loading.py +8 -7
- faster_eth_utils/network.cp311-win32.pyd +0 -0
- faster_eth_utils/network.py +25 -14
- faster_eth_utils/numeric.cp311-win32.pyd +0 -0
- faster_eth_utils/numeric.py +11 -4
- faster_eth_utils/pydantic.py +99 -0
- faster_eth_utils/toolz.cp311-win32.pyd +0 -0
- faster_eth_utils/toolz.py +82 -152
- faster_eth_utils/types.cp311-win32.pyd +0 -0
- faster_eth_utils/types.py +34 -21
- faster_eth_utils/typing/misc.py +3 -1
- faster_eth_utils/units.cp311-win32.pyd +0 -0
- faster_eth_utils-5.3.23.dist-info/METADATA +192 -0
- faster_eth_utils-5.3.23.dist-info/RECORD +53 -0
- {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.23.dist-info}/licenses/LICENSE +1 -1
- faster_eth_utils-5.3.23.dist-info/top_level.txt +3 -0
- faster_eth_utils__mypyc.cp311-win32.pyd +0 -0
- bce0bfc64ce5e845ec4a__mypyc.cp311-win32.pyd +0 -0
- faster_eth_utils-2.3.1.dist-info/METADATA +0 -160
- faster_eth_utils-2.3.1.dist-info/RECORD +0 -45
- faster_eth_utils-2.3.1.dist-info/top_level.txt +0 -3
- {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.23.dist-info}/WHEEL +0 -0
faster_eth_utils/exceptions.py
CHANGED
@@ -1,6 +1,11 @@
-class ValidationError(Exception):
+"""
+faster-eth-utils exceptions always inherit from eth-utils exceptions, so porting to faster-eth-utils
+does not require any change to your existing exception handlers. They will continue to work.
+"""
+
+import eth_utils.exceptions
+
+class ValidationError(eth_utils.exceptions.ValidationError):  # type: ignore[misc]
     """
     Raised when something does not pass a validation check.
     """
-
-    pass
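A brief usage sketch (illustrative only, based on the inheritance shown in the diff above, not taken from the package docs): handlers written against eth-utils keep catching the faster-eth-utils subclass.

import eth_utils.exceptions
import faster_eth_utils.exceptions

try:
    raise faster_eth_utils.exceptions.ValidationError("bad input")
except eth_utils.exceptions.ValidationError as exc:
    # The faster-eth-utils exception is caught via the eth-utils base class,
    # so existing exception handlers need no changes.
    print(f"caught: {exc}")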
Binary file
faster_eth_utils/functional.py
CHANGED
@@ -1,21 +1,23 @@
 import collections
 import functools
 import itertools
+from collections.abc import Callable, Iterable, Mapping
 from typing import (  # noqa: F401
-    Any,
-    Callable,
     Dict,
-    Iterable,
     List,
-    Mapping,
     Set,
     Tuple,
     TypeVar,
     Union,
 )
 
-from
+from typing_extensions import ParamSpec
 
+from .toolz import (
+    compose as _compose,
+)
+
+P = ParamSpec("P")
 T = TypeVar("T")
 
 
@@ -31,41 +33,53 @@ TFOut = TypeVar("TFOut")
 def combine(
     f: Callable[[TGOut], TFOut], g: Callable[[TGIn], TGOut]
 ) -> Callable[[TGIn], TFOut]:
-
+    def combined(x: TGIn) -> TFOut:
+        return f(g(x))
+    return combined
+
+
+TCb = TypeVar("TCb")
 
 
 def apply_to_return_value(
-    callback: Callable[
-) -> Callable[
-    def outer(fn: Callable[
-        # We would need to type annotate *args and **kwargs but doing so segfaults
-        # the PyPy builds. We ignore instead.
+    callback: Callable[[T], TCb]
+) -> Callable[[Callable[P, T]], Callable[P, TCb]]:
+    def outer(fn: Callable[P, T]) -> Callable[P, TCb]:
         @functools.wraps(fn)
-        def inner(*args, **kwargs) ->
+        def inner(*args: P.args, **kwargs: P.kwargs) -> TCb:
             return callback(fn(*args, **kwargs))
-
         return inner
-
     return outer
 
 
 TVal = TypeVar("TVal")
 TKey = TypeVar("TKey")
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+def to_tuple(fn: Callable[P, Iterable[TVal]]) -> Callable[P, tuple[TVal, ...]]:
+    def to_tuple_wrap(*args: P.args, **kwargs: P.kwargs) -> tuple[TVal, ...]:
+        return tuple(fn(*args, **kwargs))
+    return to_tuple_wrap
+
+def to_list(fn: Callable[P, Iterable[TVal]]) -> Callable[P, list[TVal]]:
+    def to_list_wrap(*args: P.args, **kwargs: P.kwargs) -> list[TVal]:
+        return list(fn(*args, **kwargs))
+    return to_list_wrap
+
+def to_set(fn: Callable[P, Iterable[TVal]]) -> Callable[P, set[TVal]]:
+    def to_set_wrap(*args: P.args, **kwargs: P.kwargs) -> set[TVal]:
+        return set(fn(*args, **kwargs))
+    return to_set_wrap
+
+def to_dict(
+    fn: Callable[P, Mapping[TKey, TVal] | Iterable[tuple[TKey, TVal]]]
+) -> Callable[P, dict[TKey, TVal]]:
+    def to_dict_wrap(*args: P.args, **kwargs: P.kwargs) -> dict[TKey, TVal]:
+        return dict(fn(*args, **kwargs))
+    return to_dict_wrap
+
+to_ordered_dict = apply_to_return_value(  # type: ignore [assignment]
     collections.OrderedDict
-)  # type: Callable[[Callable[
+)  # type: Callable[[Callable[P, Union[Mapping[TKey, TVal], Iterable[Tuple[TKey, TVal]]]]], Callable[P, collections.OrderedDict[TKey, TVal]]]  # noqa: E501
 sort_return = _compose(to_tuple, apply_to_return_value(sorted))
 flatten_return = _compose(
     to_tuple, apply_to_return_value(itertools.chain.from_iterable)
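An illustrative sketch of the rewritten decorators (based only on the diff above; the example function is hypothetical): with ParamSpec, the wrappers preserve the decorated function's parameter types for type checkers while collecting its output.

from faster_eth_utils.functional import to_tuple

@to_tuple
def squares(n: int):
    # The generator's items are collected into a tuple by the decorator.
    for i in range(n):
        yield i * i

assert squares(4) == (0, 1, 4, 9)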
Binary file
faster_eth_utils/hexadecimal.py
CHANGED
@@ -2,42 +2,52 @@
 
 import binascii
 import re
-from typing import
+from typing import (
+    Any,
+    Final,
+    TypeGuard,
+)
 
-from eth_typing import
+from eth_typing import (
+    HexStr,
+)
 
-
+_HEX_REGEXP_MATCH: Final = re.compile("(0[xX])?[0-9a-fA-F]*").fullmatch
+
+_hexlify: Final = binascii.hexlify
+_unhexlify: Final = binascii.unhexlify
 
-_HEX_REGEXP: Final = re.compile("(0[xX])?[0-9a-fA-F]*")
 
 
 def decode_hex(value: str) -> bytes:
-    if not
+    if not isinstance(value, str):
         raise TypeError("Value must be an instance of str")
     non_prefixed = remove_0x_prefix(HexStr(value))
     # unhexlify will only accept bytes type someday
     ascii_hex = non_prefixed.encode("ascii")
-    return
+    return _unhexlify(ascii_hex)
 
 
-def encode_hex(value:
-
-
-    elif isinstance(value, (bytes, bytearray)):
+def encode_hex(value: str | bytes | bytearray) -> HexStr:
+    ascii_bytes: bytes | bytearray
+    if isinstance(value, (bytes, bytearray)):
         ascii_bytes = value
-
+    elif isinstance(value, str):
         ascii_bytes = value.encode("ascii")
+    else:
+        raise TypeError("Value must be an instance of str or unicode")
 
-    binary_hex =
+    binary_hex = _hexlify(ascii_bytes)
     return add_0x_prefix(HexStr(binary_hex.decode("ascii")))
 
 
 def is_0x_prefixed(value: str) -> bool:
-
-
-
-
-
+    # this check is not needed in the compiled version
+    # if not isinstance(value, str):
+    #     raise TypeError(
+    #         f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
+    #     )
+    return value.startswith("0x") or value.startswith("0X")
 
 
 def remove_0x_prefix(value: HexStr) -> HexStr:
@@ -52,17 +62,15 @@ def add_0x_prefix(value: HexStr) -> HexStr:
     return HexStr("0x" + value)
 
 
-def is_hexstr(value: Any) ->
-    if not
+def is_hexstr(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str) or not value:
         return False
-    return
+    return _HEX_REGEXP_MATCH(value) is not None
 
 
-def is_hex(value: Any) ->
-    if not
-        raise TypeError(
-            "is_hex requires text typed arguments. Got: {0}".format(repr(value))
-        )
+def is_hex(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str):
+        raise TypeError(f"is_hex requires text typed arguments. Got: {repr(value)}")
     if not value:
         return False
-    return
+    return _HEX_REGEXP_MATCH(value) is not None
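An illustrative sketch (using only the functions visible in the diff above): the new TypeGuard[HexStr] return type lets a type checker narrow a checked value to HexStr before passing it on.

from faster_eth_utils.hexadecimal import decode_hex, is_hexstr

def to_bytes(value: object) -> bytes:
    if is_hexstr(value):
        # After the TypeGuard check, type checkers treat `value` as HexStr.
        return decode_hex(value)
    raise ValueError("expected a hex string")

assert to_bytes("0xdeadbeef") == bytes.fromhex("deadbeef")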
Binary file
faster_eth_utils/humanize.py
CHANGED
@@ -1,22 +1,36 @@
-from
-
+from collections.abc import (
+    Iterable,
+    Iterator,
+)
+from typing import (
+    Any,
+    Final,
+)
+from urllib import (
+    parse,
+)
 
-from eth_typing import
+from eth_typing import (
+    URI,
+    Hash32,
+)
 
-from faster_eth_utils.currency import
+from faster_eth_utils.currency import (
+    denoms,
+    from_wei,
+)
 
-from .
+from . import toolz
 
 
-def humanize_seconds(seconds:
-
+def humanize_seconds(seconds: float | int) -> str:
+    seconds_int = int(seconds)
+    if seconds_int == 0:
         return "0s"
 
-    unit_values = _consume_leading_zero_units(_humanize_seconds(
+    unit_values = _consume_leading_zero_units(_humanize_seconds(seconds_int))
 
-    return "".join(
-        ("{0}{1}".format(amount, unit) for amount, unit in take(3, unit_values))
-    )
+    return "".join(f"{amount}{unit}" for amount, unit in toolz.take(3, unit_values))
 
 
 SECOND: Final = 1
@@ -40,8 +54,8 @@ UNITS: Final = (
 
 
 def _consume_leading_zero_units(
-    units_iter: Iterator[
-) -> Iterator[
+    units_iter: Iterator[tuple[int, str]]
+) -> Iterator[tuple[int, str]]:
     for amount, unit in units_iter:
         if amount == 0:
             continue
@@ -52,7 +66,7 @@ def _consume_leading_zero_units(
     yield from units_iter
 
 
-def _humanize_seconds(seconds: int) -> Iterator[
+def _humanize_seconds(seconds: int) -> Iterator[tuple[int, str]]:
     remainder = seconds
 
     for duration, unit in UNITS:
@@ -74,7 +88,22 @@ def humanize_bytes(value: bytes) -> str:
     value_as_hex = value.hex()
     head = value_as_hex[:DISPLAY_HASH_CHARS]
     tail = value_as_hex[-1 * DISPLAY_HASH_CHARS :]
-    return "{
+    return f"{head}..{tail}"
+
+
+def humanize_hexstr(value: str) -> str:
+    tail = value[-1 * DISPLAY_HASH_CHARS :]
+
+    if value[:2] == "0x":
+        if len(value[2:]) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[2 : DISPLAY_HASH_CHARS + 2]
+        return f"0x{head}..{tail}"
+    else:
+        if len(value) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[:DISPLAY_HASH_CHARS]
+        return f"{head}..{tail}"
 
 
 def humanize_hash(value: Hash32) -> str:
@@ -84,15 +113,15 @@ def humanize_hash(value: Hash32) -> str:
 def humanize_ipfs_uri(uri: URI) -> str:
     if not is_ipfs_uri(uri):
         raise TypeError(
-            "
-            "only CIDv0 hash schemes are supported."
+            f"{uri} does not look like a valid IPFS uri. Currently, "
+            "only CIDv0 hash schemes are supported."
         )
 
     parsed = parse.urlparse(uri)
     ipfs_hash = parsed.netloc
     head = ipfs_hash[:DISPLAY_HASH_CHARS]
     tail = ipfs_hash[-1 * DISPLAY_HASH_CHARS :]
-    return "ipfs://{
+    return f"ipfs://{head}..{tail}"
 
 
 def is_ipfs_uri(value: Any) -> bool:
@@ -112,9 +141,9 @@ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
         return False
 
 
-def _find_breakpoints(
+def _find_breakpoints(values: tuple[int, ...]) -> Iterator[int]:
     yield 0
-    for index, (left, right) in enumerate(sliding_window(2, values), 1):
+    for index, (left, right) in enumerate(toolz.sliding_window(2, values), 1):
         if left + 1 == right:
             continue
         else:
@@ -122,7 +151,7 @@ def _find_breakpoints(*values: int) -> Iterator[int]:
     yield len(values)
 
 
-def _extract_integer_ranges(
+def _extract_integer_ranges(values: tuple[int, ...]) -> Iterator[tuple[int, int]]:
     """
     Return a tuple of consecutive ranges of integers.
 
@@ -132,17 +161,17 @@ def _extract_integer_ranges(*values: int) -> Iterator[Tuple[int, int]]:
     - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
     - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
     """
-    for left, right in sliding_window(2, _find_breakpoints(
+    for left, right in toolz.sliding_window(2, _find_breakpoints(values)):
         chunk = values[left:right]
         yield chunk[0], chunk[-1]
 
 
-def _humanize_range(bounds:
+def _humanize_range(bounds: tuple[int, int]) -> str:
     left, right = bounds
     if left == right:
         return str(left)
     else:
-        return "{left}-{right}"
+        return f"{left}-{right}"
 
 
 def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
@@ -158,7 +187,7 @@ def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
     if not values:
         return "(empty)"
    else:
-        return "|".join(
+        return "|".join(_humanize_range(range) for range in _extract_integer_ranges(values))
 
 
 def humanize_wei(number: int) -> str:
@@ -169,5 +198,4 @@ def humanize_wei(number: int) -> str:
     else:
         unit = "wei"
     amount = from_wei(number, unit)
-
-    return x
+    return f"{str(amount)} {unit}"
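A short sketch of the new humanize_hexstr helper (illustrative only; the exact cutoff depends on DISPLAY_HASH_CHARS, which is defined elsewhere in the module and not shown in this diff):

from faster_eth_utils.humanize import humanize_hexstr

print(humanize_hexstr("0xab"))            # at or under the cutoff: returned unchanged
print(humanize_hexstr("0x" + "ab" * 32))  # over the cutoff: shortened to "0x<head>..<tail>"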
faster_eth_utils/logging.py
CHANGED
@@ -1,19 +1,34 @@
-import contextlib
 import logging
-import
-
-
-from
-
-
-
-
-
+from collections.abc import (
+    Iterator,
+)
+from contextlib import (
+    contextmanager,
+)
+from functools import (
+    cached_property,
+)
+from typing import (
+    Any,
+    Final,
+    TypeVar,
+    cast,
+    overload,
+)
+
+from .toolz import (
+    assoc,
+)
 
 DEBUG2_LEVEL_NUM = 8
 
 TLogger = TypeVar("TLogger", bound=logging.Logger)
 
+Logger: Final = logging.Logger
+getLogger: Final = logging.getLogger
+getLoggerClass: Final = logging.getLoggerClass
+setLoggerClass: Final = logging.setLoggerClass
+
 
 class ExtendedDebugLogger(logging.Logger):
     """
@@ -33,7 +48,7 @@ class ExtendedDebugLogger(logging.Logger):
             # lambda to further speed up
             self.__dict__["debug2"] = lambda message, *args, **kwargs: None
 
-    def __reduce__(self) ->
+    def __reduce__(self) -> tuple[Any, ...]:
         # This is needed because our parent's implementation could
         # cause us to become a regular Logger on unpickling.
         return get_extended_debug_logger, (self.name,)
@@ -45,37 +60,38 @@ def setup_DEBUG2_logging() -> None:
     """
     if not hasattr(logging, "DEBUG2"):
         logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
-
+        logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]
 
-
-
-
-
-    logging.setLoggerClass(logger_class)
+@contextmanager
+def _use_logger_class(logger_class: type[logging.Logger]) -> Iterator[None]:
+    original_logger_class = getLoggerClass()
+    setLoggerClass(logger_class)
     try:
         yield
     finally:
-
+        setLoggerClass(original_logger_class)
 
 
-
+@overload
+def get_logger(name: str, logger_class: type[TLogger]) -> TLogger: ...
+@overload
+def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
+def get_logger(name: str, logger_class: type[TLogger] | None = None) -> TLogger | logging.Logger:
     if logger_class is None:
-        return
-
-
-
-
-
-
-
-
-
-
-
-
-
-        del manager.loggerDict[name]
-    return cast(TLogger, logging.getLogger(name))
+        return getLogger(name)
+
+    with _use_logger_class(logger_class):
+        # The logging module caches logger instances. The following code
+        # ensures that if there is a cached instance that we don't
+        # accidentally return the incorrect logger type because the logging
+        # module does not *update* the cached instance in the event that
+        # the global logging class changes.
+        manager = Logger.manager
+        logger_dict = manager.loggerDict
+        cached_logger = logger_dict.get(name)
+        if cached_logger is not None and type(cached_logger) is not logger_class:
+            del logger_dict[name]
+        return cast(TLogger, getLogger(name))
 
 
 def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
@@ -93,13 +109,13 @@ class HasLoggerMeta(type):
     to use when creating the associated logger for a given class.
     """
 
-    logger_class =
+    logger_class = Logger
 
     def __new__(
-        mcls:
+        mcls: type[THasLoggerMeta],
         name: str,
-        bases:
-        namespace:
+        bases: tuple[type[Any]],
+        namespace: dict[str, Any],
     ) -> THasLoggerMeta:
         if "logger" in namespace:
             # If a logger was explicitly declared we shouldn't do anything to
@@ -107,45 +123,30 @@ class HasLoggerMeta(type):
             return super().__new__(mcls, name, bases, namespace)
         if "__qualname__" not in namespace:
             raise AttributeError("Missing __qualname__")
-
-
-        logger = logging.getLogger(namespace["__qualname__"])
+
+        logger = get_logger(namespace["__qualname__"], mcls.logger_class)
 
         return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))
 
     @classmethod
     def replace_logger_class(
-        mcls:
-    ) ->
+        mcls: type[THasLoggerMeta], value: type[logging.Logger]
+    ) -> type[THasLoggerMeta]:
         return type(mcls.__name__, (mcls,), {"logger_class": value})
 
     @classmethod
     def meta_compat(
-        mcls:
-    ) ->
+        mcls: type[THasLoggerMeta], other: type[type]
+    ) -> type[THasLoggerMeta]:
         return type(mcls.__name__, (mcls, other), {})
 
 
-class
-
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = logging.Logger("")  # type: logging.Logger
-
-
-class HasLogger(_BaseHasLogger):
-    pass
+class HasLogger(metaclass=HasLoggerMeta):
+    logger: logging.Logger
 
 
 HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)
 
 
-class
-
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = ExtendedDebugLogger("")  # type: ExtendedDebugLogger
-
-
-class HasExtendedDebugLogger(_BaseHasExtendedDebugLogger):
-    pass
+class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore[metaclass]
+    logger: ExtendedDebugLogger
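An illustrative sketch of the logging helpers touched above (the logger name is arbitrary; only functions visible in this diff are used):

from faster_eth_utils.logging import (
    ExtendedDebugLogger,
    get_extended_debug_logger,
    setup_DEBUG2_logging,
)

setup_DEBUG2_logging()  # registers the DEBUG2 level name on the logging module
log = get_extended_debug_logger("myapp.worker")  # hypothetical logger name
assert isinstance(log, ExtendedDebugLogger)
log.debug2("only emitted when the DEBUG2 level is enabled")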
Binary file
faster_eth_utils/module_loading.py
CHANGED

@@ -1,5 +1,9 @@
-from importlib import
-
+from importlib import (
+    import_module,
+)
+from typing import (
+    Any,
+)
 
 
 def import_string(dotted_path: str) -> Any:
@@ -15,7 +19,7 @@ def import_string(dotted_path: str) -> Any:
     try:
         module_path, class_name = dotted_path.rsplit(".", 1)
     except ValueError:
-        msg = "
+        msg = f"{dotted_path} doesn't look like a module path"
         raise ImportError(msg)
 
     module = import_module(module_path)
@@ -23,8 +27,5 @@ def import_string(dotted_path: str) -> Any:
     try:
         return getattr(module, class_name)
     except AttributeError:
-        msg = 'Module "
-            module_path,
-            class_name,
-        )
+        msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
         raise ImportError(msg)
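A minimal usage sketch for import_string (illustrative; the dotted path below is arbitrary):

from faster_eth_utils.module_loading import import_string

OrderedDict = import_string("collections.OrderedDict")
assert OrderedDict.__name__ == "OrderedDict"
# A malformed or missing path raises ImportError with the f-string messages shown above.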
Binary file