faster-eth-utils 2.3.1__cp311-cp311-win32.whl → 5.3.19__cp311-cp311-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of faster-eth-utils has been flagged as potentially problematic by the registry.
- faster_eth_utils/__init__.py +54 -17
- faster_eth_utils/__json/eth_networks.json +1 -0
- faster_eth_utils/__main__.py +3 -1
- faster_eth_utils/abi.cp311-win32.pyd +0 -0
- faster_eth_utils/abi.py +819 -37
- faster_eth_utils/address.cp311-win32.pyd +0 -0
- faster_eth_utils/address.py +57 -62
- faster_eth_utils/applicators.cp311-win32.pyd +0 -0
- faster_eth_utils/applicators.py +138 -70
- faster_eth_utils/conversions.cp311-win32.pyd +0 -0
- faster_eth_utils/conversions.py +54 -25
- faster_eth_utils/crypto.cp311-win32.pyd +0 -0
- faster_eth_utils/crypto.py +15 -5
- faster_eth_utils/currency.cp311-win32.pyd +0 -0
- faster_eth_utils/currency.py +70 -32
- faster_eth_utils/curried/__init__.py +107 -77
- faster_eth_utils/debug.cp311-win32.pyd +0 -0
- faster_eth_utils/debug.py +3 -3
- faster_eth_utils/decorators.cp311-win32.pyd +0 -0
- faster_eth_utils/decorators.py +73 -20
- faster_eth_utils/encoding.cp311-win32.pyd +0 -0
- faster_eth_utils/exceptions.cp311-win32.pyd +0 -0
- faster_eth_utils/exceptions.py +8 -3
- faster_eth_utils/functional.cp311-win32.pyd +0 -0
- faster_eth_utils/functional.py +41 -25
- faster_eth_utils/hexadecimal.cp311-win32.pyd +0 -0
- faster_eth_utils/hexadecimal.py +36 -24
- faster_eth_utils/humanize.cp311-win32.pyd +0 -0
- faster_eth_utils/humanize.py +46 -18
- faster_eth_utils/logging.py +43 -48
- faster_eth_utils/module_loading.cp311-win32.pyd +0 -0
- faster_eth_utils/module_loading.py +8 -7
- faster_eth_utils/network.cp311-win32.pyd +0 -0
- faster_eth_utils/network.py +25 -13
- faster_eth_utils/numeric.cp311-win32.pyd +0 -0
- faster_eth_utils/numeric.py +11 -4
- faster_eth_utils/pydantic.py +101 -0
- faster_eth_utils/toolz.cp311-win32.pyd +0 -0
- faster_eth_utils/toolz.py +82 -152
- faster_eth_utils/types.cp311-win32.pyd +0 -0
- faster_eth_utils/types.py +37 -21
- faster_eth_utils/typing/misc.py +3 -1
- faster_eth_utils/units.cp311-win32.pyd +0 -0
- faster_eth_utils-5.3.19.dist-info/METADATA +193 -0
- faster_eth_utils-5.3.19.dist-info/RECORD +53 -0
- {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.19.dist-info}/licenses/LICENSE +1 -1
- faster_eth_utils-5.3.19.dist-info/top_level.txt +3 -0
- faster_eth_utils__mypyc.cp311-win32.pyd +0 -0
- bce0bfc64ce5e845ec4a__mypyc.cp311-win32.pyd +0 -0
- faster_eth_utils-2.3.1.dist-info/METADATA +0 -160
- faster_eth_utils-2.3.1.dist-info/RECORD +0 -45
- faster_eth_utils-2.3.1.dist-info/top_level.txt +0 -3
- {faster_eth_utils-2.3.1.dist-info → faster_eth_utils-5.3.19.dist-info}/WHEEL +0 -0
faster_eth_utils/functional.py
CHANGED

@@ -2,7 +2,6 @@ import collections
 import functools
 import itertools
 from typing import (  # noqa: F401
-    Any,
     Callable,
     Dict,
     Iterable,
@@ -14,8 +13,13 @@ from typing import (  # noqa: F401
     Union,
 )

-from
+from typing_extensions import ParamSpec

+from .toolz import (
+    compose as _compose,
+)
+
+P = ParamSpec("P")
 T = TypeVar("T")


@@ -31,41 +35,53 @@ TFOut = TypeVar("TFOut")
 def combine(
     f: Callable[[TGOut], TFOut], g: Callable[[TGIn], TGOut]
 ) -> Callable[[TGIn], TFOut]:
-
+    def combined(x: TGIn) -> TFOut:
+        return f(g(x))
+    return combined
+
+
+TCb = TypeVar("TCb")


 def apply_to_return_value(
-    callback: Callable[
-) -> Callable[
-    def outer(fn: Callable[
-        # We would need to type annotate *args and **kwargs but doing so segfaults
-        # the PyPy builds. We ignore instead.
+    callback: Callable[[T], TCb]
+) -> Callable[[Callable[P, T]], Callable[P, TCb]]:
+    def outer(fn: Callable[P, T]) -> Callable[P, TCb]:
         @functools.wraps(fn)
-        def inner(*args, **kwargs) ->
+        def inner(*args: P.args, **kwargs: P.kwargs) -> TCb:
             return callback(fn(*args, **kwargs))
-
         return inner
-
     return outer


 TVal = TypeVar("TVal")
 TKey = TypeVar("TKey")
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+def to_tuple(fn: Callable[P, Iterable[TVal]]) -> Callable[P, Tuple[TVal, ...]]:
+    def to_tuple_wrap(*args: P.args, **kwargs: P.kwargs) -> Tuple[TVal, ...]:
+        return tuple(fn(*args, **kwargs))
+    return to_tuple_wrap
+
+def to_list(fn: Callable[P, Iterable[TVal]]) -> Callable[P, List[TVal]]:
+    def to_list_wrap(*args: P.args, **kwargs: P.kwargs) -> List[TVal]:
+        return list(fn(*args, **kwargs))
+    return to_list_wrap
+
+def to_set(fn: Callable[P, Iterable[TVal]]) -> Callable[P, Set[TVal]]:
+    def to_set_wrap(*args: P.args, **kwargs: P.kwargs) -> Set[TVal]:
+        return set(fn(*args, **kwargs))
+    return to_set_wrap
+
+def to_dict(
+    fn: Callable[P, Union[Mapping[TKey, TVal], Iterable[Tuple[TKey, TVal]]]]
+) -> Callable[P, Dict[TKey, TVal]]:
+    def to_dict_wrap(*args: P.args, **kwargs: P.kwargs) -> Dict[TKey, TVal]:
+        return dict(fn(*args, **kwargs))
+    return to_dict_wrap
+
+to_ordered_dict = apply_to_return_value(  # type: ignore [assignment]
     collections.OrderedDict
-)  # type: Callable[[Callable[
+)  # type: Callable[[Callable[P, Union[Mapping[TKey, TVal], Iterable[Tuple[TKey, TVal]]]]], Callable[P, collections.OrderedDict[TKey, TVal]]]  # noqa: E501
 sort_return = _compose(to_tuple, apply_to_return_value(sorted))
 flatten_return = _compose(
     to_tuple, apply_to_return_value(itertools.chain.from_iterable)
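The rewritten decorators above now carry ParamSpec-typed signatures and eagerly materialize the wrapped iterable. A minimal usage sketch based only on the code shown in this diff; the `squares` and `index_by_value` functions are illustrative and not part of the package:

from typing import Iterable, Tuple

from faster_eth_utils.functional import to_dict, to_tuple


@to_tuple
def squares(n: int) -> Iterable[int]:
    # The generator is eagerly materialized into a tuple by the decorator.
    for i in range(n):
        yield i * i


@to_dict
def index_by_value(values: Iterable[str]) -> Iterable[Tuple[str, int]]:
    # Yielded (key, value) pairs are collected into a dict.
    for index, value in enumerate(values):
        yield value, index


assert squares(4) == (0, 1, 4, 9)
assert index_by_value(["a", "b"]) == {"a": 0, "b": 1}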
faster_eth_utils/hexadecimal.py
CHANGED

@@ -2,41 +2,55 @@

 import binascii
 import re
-from typing import
+from typing import (
+    Any,
+    AnyStr,
+    Final,
+    Union,
+)

-from eth_typing import
+from eth_typing import (
+    HexStr,
+)
+from typing_extensions import (
+    TypeGuard,
+)

-
+_HEX_REGEXP_MATCH: Final = re.compile("(0[xX])?[0-9a-fA-F]*").fullmatch
+
+hexlify: Final = binascii.hexlify
+unhexlify: Final = binascii.unhexlify

-_HEX_REGEXP: Final = re.compile("(0[xX])?[0-9a-fA-F]*")


 def decode_hex(value: str) -> bytes:
-    if not
+    if not isinstance(value, str):
         raise TypeError("Value must be an instance of str")
     non_prefixed = remove_0x_prefix(HexStr(value))
     # unhexlify will only accept bytes type someday
     ascii_hex = non_prefixed.encode("ascii")
-    return
+    return unhexlify(ascii_hex)


 def encode_hex(value: AnyStr) -> HexStr:
-
-
-    elif isinstance(value, (bytes, bytearray)):
+    ascii_bytes: Union[bytes, bytearray]
+    if isinstance(value, (bytes, bytearray)):
         ascii_bytes = value
-
+    elif isinstance(value, str):
         ascii_bytes = value.encode("ascii")
+    else:
+        raise TypeError("Value must be an instance of str or unicode")

-    binary_hex =
+    binary_hex = hexlify(ascii_bytes)
     return add_0x_prefix(HexStr(binary_hex.decode("ascii")))


 def is_0x_prefixed(value: str) -> bool:
-
-
-
-
+    # this check is not needed in the compiled version
+    # if not isinstance(value, str):
+    #     raise TypeError(
+    #         f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
+    #     )
     return value.startswith(("0x", "0X"))


@@ -52,17 +66,15 @@ def add_0x_prefix(value: HexStr) -> HexStr:
     return HexStr("0x" + value)


-def is_hexstr(value: Any) ->
-    if not
+def is_hexstr(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str) or not value:
         return False
-    return
+    return _HEX_REGEXP_MATCH(value) is not None


-def is_hex(value: Any) ->
-    if not
-        raise TypeError(
-            "is_hex requires text typed arguments. Got: {0}".format(repr(value))
-        )
+def is_hex(value: Any) -> TypeGuard[HexStr]:
+    if not isinstance(value, str):
+        raise TypeError(f"is_hex requires text typed arguments. Got: {repr(value)}")
     if not value:
         return False
-    return
+    return _HEX_REGEXP_MATCH(value) is not None
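The return types of `is_hexstr` and `is_hex` changed from plain booleans to `TypeGuard[HexStr]`, so a passing check should narrow the argument's static type to `HexStr`. A small sketch of that narrowing, using only the functions shown above; `maybe_decode` is a hypothetical helper:

from typing import Optional

from faster_eth_utils.hexadecimal import decode_hex, is_hexstr


def maybe_decode(value: object) -> Optional[bytes]:
    # After the guard, static checkers treat `value` as HexStr rather than object.
    if is_hexstr(value):
        return decode_hex(value)
    return None


assert maybe_decode("0xdeadbeef") == bytes.fromhex("deadbeef")
assert maybe_decode(42) is None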
faster_eth_utils/humanize.py
CHANGED

@@ -1,11 +1,26 @@
-from typing import
-
+from typing import (
+    Any,
+    Final,
+    Iterable,
+    Iterator,
+    Tuple,
+    Union,
+)
+from urllib import (
+    parse,
+)

-from eth_typing import
+from eth_typing import (
+    URI,
+    Hash32,
+)

-from faster_eth_utils.currency import
+from faster_eth_utils.currency import (
+    denoms,
+    from_wei,
+)

-from .
+from . import toolz


 def humanize_seconds(seconds: Union[float, int]) -> str:
@@ -14,9 +29,7 @@ def humanize_seconds(seconds: Union[float, int]) -> str:

     unit_values = _consume_leading_zero_units(_humanize_seconds(int(seconds)))

-    return "".join(
-        ("{0}{1}".format(amount, unit) for amount, unit in take(3, unit_values))
-    )
+    return "".join((f"{amount}{unit}" for amount, unit in toolz.take(3, unit_values)))


 SECOND: Final = 1
@@ -74,7 +87,22 @@ def humanize_bytes(value: bytes) -> str:
     value_as_hex = value.hex()
     head = value_as_hex[:DISPLAY_HASH_CHARS]
     tail = value_as_hex[-1 * DISPLAY_HASH_CHARS :]
-    return "{
+    return f"{head}..{tail}"
+
+
+def humanize_hexstr(value: str) -> str:
+    tail = value[-1 * DISPLAY_HASH_CHARS :]
+
+    if value[:2] == "0x":
+        if len(value[2:]) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[2 : DISPLAY_HASH_CHARS + 2]
+        return f"0x{head}..{tail}"
+    else:
+        if len(value) <= DISPLAY_HASH_CHARS * 2:
+            return value
+        head = value[:DISPLAY_HASH_CHARS]
+        return f"{head}..{tail}"


 def humanize_hash(value: Hash32) -> str:
@@ -84,15 +112,15 @@ def humanize_hash(value: Hash32) -> str:
 def humanize_ipfs_uri(uri: URI) -> str:
     if not is_ipfs_uri(uri):
         raise TypeError(
-            "
-            "only CIDv0 hash schemes are supported."
+            f"{uri} does not look like a valid IPFS uri. Currently, "
+            "only CIDv0 hash schemes are supported."
         )

     parsed = parse.urlparse(uri)
     ipfs_hash = parsed.netloc
     head = ipfs_hash[:DISPLAY_HASH_CHARS]
     tail = ipfs_hash[-1 * DISPLAY_HASH_CHARS :]
-    return "ipfs://{
+    return f"ipfs://{head}..{tail}"


 def is_ipfs_uri(value: Any) -> bool:
@@ -112,9 +140,9 @@ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
     return False


-def _find_breakpoints(
+def _find_breakpoints(values: Tuple[int, ...]) -> Iterator[int]:
     yield 0
-    for index, (left, right) in enumerate(sliding_window(2, values), 1):
+    for index, (left, right) in enumerate(toolz.sliding_window(2, values), 1):
         if left + 1 == right:
             continue
         else:
@@ -122,7 +150,7 @@ def _find_breakpoints(*values: int) -> Iterator[int]:
     yield len(values)


-def _extract_integer_ranges(
+def _extract_integer_ranges(values: Tuple[int, ...]) -> Iterator[Tuple[int, int]]:
     """
     Return a tuple of consecutive ranges of integers.

@@ -132,7 +160,7 @@ def _extract_integer_ranges(*values: int) -> Iterator[Tuple[int, int]]:
     - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
     - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
     """
-    for left, right in sliding_window(2, _find_breakpoints(
+    for left, right in toolz.sliding_window(2, _find_breakpoints(values)):
         chunk = values[left:right]
         yield chunk[0], chunk[-1]

@@ -142,7 +170,7 @@ def _humanize_range(bounds: Tuple[int, int]) -> str:
     if left == right:
         return str(left)
     else:
-        return "{left}-{right}"
+        return f"{left}-{right}"


 def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
@@ -158,7 +186,7 @@ def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
     if not values:
         return "(empty)"
     else:
-        return "|".join(
+        return "|".join(_humanize_range(range) for range in _extract_integer_ranges(values))


 def humanize_wei(number: int) -> str:
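The new `humanize_hexstr` shortens long hex strings to a head/tail form and passes short ones through unchanged. A sketch of the expected behavior, assuming `DISPLAY_HASH_CHARS` is 4 as in upstream eth-utils (the constant's value is not visible in this diff):

from faster_eth_utils.humanize import humanize_hexstr, humanize_integer_sequence

# Long values keep the 0x prefix plus the first and last DISPLAY_HASH_CHARS hex chars.
print(humanize_hexstr("0x" + "ab" * 32))  # -> 0xabab..abab (assuming DISPLAY_HASH_CHARS == 4)
print(humanize_hexstr("0x1234"))          # -> 0x1234 (short values pass through unchanged)

# Consecutive runs of integers are collapsed into ranges, per the docstring above.
print(humanize_integer_sequence([1, 2, 3, 7, 8, 9, 15]))  # -> 1-3|7-9|15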
faster_eth_utils/logging.py
CHANGED

@@ -1,14 +1,23 @@
 import contextlib
+from functools import (
+    cached_property,
+)
 import logging
-import
-
-
-
-
-
-
-
-
+from typing import (
+    Any,
+    Dict,
+    Iterator,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+
+from .toolz import (
+    assoc,
+)

 DEBUG2_LEVEL_NUM = 8

@@ -45,8 +54,7 @@ def setup_DEBUG2_logging() -> None:
     """
     if not hasattr(logging, "DEBUG2"):
         logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
-
-
+        logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]


 @contextlib.contextmanager
 def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
@@ -58,24 +66,26 @@ def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
         logging.setLoggerClass(original_logger_class)


-
+@overload
+def get_logger(name: str, logger_class: Type[TLogger]) -> TLogger: ...
+@overload
+def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
+def get_logger(name: str, logger_class: Union[Type[TLogger], None] = None) -> Union[TLogger, logging.Logger]:
     if logger_class is None:
+        return logging.getLogger(name)
+
+    with _use_logger_class(logger_class):
+        # The logging module caches logger instances. The following code
+        # ensures that if there is a cached instance that we don't
+        # accidentally return the incorrect logger type because the logging
+        # module does not *update* the cached instance in the event that
+        # the global logging class changes.
+        manager = logging.Logger.manager
+        logger_dict = manager.loggerDict
+        cached_logger = logger_dict.get(name)
+        if cached_logger is not None and type(cached_logger) is not logger_class:
+            del logger_dict[name]
         return cast(TLogger, logging.getLogger(name))
-    else:
-        with _use_logger_class(logger_class):
-            # The logging module caches logger instances. The following code
-            # ensures that if there is a cached instance that we don't
-            # accidentally return the incorrect logger type because the logging
-            # module does not *update* the cached instance in the event that
-            # the global logging class changes.
-            #
-            # types ignored b/c mypy doesn't identify presence of
-            # manager on logging.Logger
-            manager = logging.Logger.manager
-            if name in manager.loggerDict:
-                if type(manager.loggerDict[name]) is not logger_class:
-                    del manager.loggerDict[name]
-            return cast(TLogger, logging.getLogger(name))


 def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
@@ -107,9 +117,8 @@ class HasLoggerMeta(type):
             return super().__new__(mcls, name, bases, namespace)
         if "__qualname__" not in namespace:
             raise AttributeError("Missing __qualname__")
-
-
-        logger = logging.getLogger(namespace["__qualname__"])
+
+        logger = get_logger(namespace["__qualname__"], mcls.logger_class)

         return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))

@@ -126,26 +135,12 @@ class HasLoggerMeta(type):
         return type(mcls.__name__, (mcls, other), {})


-class
-
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = logging.Logger("")  # type: logging.Logger
-
-
-class HasLogger(_BaseHasLogger):
-    pass
+class HasLogger(metaclass=HasLoggerMeta):
+    logger: logging.Logger


 HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)


-class
-
-    # python3.5 is deprecated this can be removed in favor of a simple type
-    # annotation on the main class.
-    logger = ExtendedDebugLogger("")  # type: ExtendedDebugLogger
-
-
-class HasExtendedDebugLogger(_BaseHasExtendedDebugLogger):
-    pass
+class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore [metaclass,misc]
+    logger: ExtendedDebugLogger
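`get_logger` gained `@overload` signatures: called without a logger class it returns a plain `logging.Logger`, and called with a logger subclass it returns that subclass, evicting a stale cached instance of a different type. A brief sketch based on the code above; the logger names are illustrative:

import logging

from faster_eth_utils.logging import ExtendedDebugLogger, get_logger

# Without a class argument, a plain logging.Logger is returned.
plain = get_logger("myapp.plain")
assert isinstance(plain, logging.Logger)

# Passing ExtendedDebugLogger returns (and caches) that subclass, replacing
# any previously cached logger of a different type under the same name.
debug_logger = get_logger("myapp.debug", ExtendedDebugLogger)
assert isinstance(debug_logger, ExtendedDebugLogger)
debug_logger.debug2("only emitted when the DEBUG2 level is enabled")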
faster_eth_utils/module_loading.py
CHANGED

@@ -1,5 +1,9 @@
-from importlib import
-
+from importlib import (
+    import_module,
+)
+from typing import (
+    Any,
+)


 def import_string(dotted_path: str) -> Any:
@@ -15,7 +19,7 @@ def import_string(dotted_path: str) -> Any:
     try:
         module_path, class_name = dotted_path.rsplit(".", 1)
     except ValueError:
-        msg = "
+        msg = f"{dotted_path} doesn't look like a module path"
         raise ImportError(msg)

     module = import_module(module_path)
@@ -23,8 +27,5 @@ def import_string(dotted_path: str) -> Any:
     try:
         return getattr(module, class_name)
     except AttributeError:
-        msg = 'Module "
-            module_path,
-            class_name,
-        )
+        msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
         raise ImportError(msg)
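`import_string` still resolves a dotted path to a module attribute; only the error messages moved to f-strings. A short usage sketch of the behavior shown above:

from faster_eth_utils.module_loading import import_string

# Resolves "package.module.attribute" to the attribute itself.
OrderedDict = import_string("collections.OrderedDict")
assert OrderedDict.__name__ == "OrderedDict"

# A path that cannot be split into module and attribute raises ImportError.
try:
    import_string("nodots")
except ImportError as exc:
    print(exc)  # nodots doesn't look like a module path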
faster_eth_utils/network.py
CHANGED

@@ -1,12 +1,27 @@
-from dataclasses import
+from dataclasses import (
+    dataclass,
+)
 import json
 import os
-
-import
+import sys
+from pathlib import (
+    Path,
+)
+from typing import (
+    Final,
+    List,
+)

-from eth_typing import
+from eth_typing import (
+    ChainId,
+)

-from faster_eth_utils import
+from faster_eth_utils import (
+    ValidationError,
+)
+
+
+FASTER_ETH_UTILS_FOLDER: Final = Path(sys.modules["faster_eth_utils"].__file__).parent  # type: ignore [arg-type]


 @dataclass
@@ -21,11 +36,10 @@ def initialize_network_objects() -> List[Network]:
     networks_obj = []

     networks_json_path = os.path.abspath(
-        os.path.join(
+        os.path.join(str(FASTER_ETH_UTILS_FOLDER), "__json")
     )
     with open(
         os.path.join(networks_json_path, "eth_networks.json"),
-        "r",
         encoding="UTF-8",
     ) as open_file:
         network_data = json.load(open_file)
@@ -40,13 +54,11 @@ def initialize_network_objects() -> List[Network]:
             )
             networks_obj.append(network)
         except ValueError:
-            #
-
-
-                f"a valid ChainId. eth-typing should be updated with the latest "
-                f"networks."
-            )
+            # Chain does not have a valid ChainId, network files in eth-utils and
+            # eth-typing should to be updated. Run `python update_networks.py` in the
+            # project root.
             pass
+
     return networks_obj

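The networks JSON is now resolved relative to the installed package via `sys.modules` rather than relative to the working directory. A sketch of how the added `FASTER_ETH_UTILS_FOLDER` constant resolves at runtime; the printed path depends on the install location:

import sys
from pathlib import Path

import faster_eth_utils  # noqa: F401  (ensures the module is present in sys.modules)

# Mirrors the constant added in the diff: the package directory is taken
# from the imported module's __file__, so it works from any working directory.
package_dir = Path(sys.modules["faster_eth_utils"].__file__).parent
networks_json = package_dir / "__json" / "eth_networks.json"
print(networks_json)  # e.g. .../site-packages/faster_eth_utils/__json/eth_networks.json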
faster_eth_utils/numeric.py
CHANGED

@@ -1,7 +1,14 @@
-from abc import
+from abc import (
+    ABC,
+    abstractmethod,
+)
 import decimal
 import numbers
-from typing import
+from typing import (
+    Any,
+    TypeVar,
+    Union,
+)


 class Comparable(ABC):
@@ -22,11 +29,11 @@ TValue = TypeVar("TValue", bound=TComparable)

 def clamp(lower_bound: TValue, upper_bound: TValue, value: TValue) -> TValue:
     # The `mypy` ignore statements here are due to doing a comparison of
-    # `Union` types which isn't allowed. (per cburgdorf).
+    # `Union` types which isn't allowed. (per cburgdorf). This approach was
     # chosen over using `typing.overload` to define multiple signatures for
     # each comparison type here since the added value of "proper" typing
     # doesn't seem to justify the complexity of having a bunch of different
-    # signatures defined.
+    # signatures defined. The external library perspective on this function
     # should still be adequate under this approach
     if value < lower_bound:  # type: ignore
         return lower_bound