faster-eth-utils 5.3.19 (faster_eth_utils-5.3.19-cp310-cp310-win32.whl)

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.

Potentially problematic release. This version of faster-eth-utils might be problematic.
Files changed (53)
  1. faster_eth_utils/__init__.py +144 -0
  2. faster_eth_utils/__json/eth_networks.json +1 -0
  3. faster_eth_utils/__main__.py +5 -0
  4. faster_eth_utils/abi.cp310-win32.pyd +0 -0
  5. faster_eth_utils/abi.py +847 -0
  6. faster_eth_utils/address.cp310-win32.pyd +0 -0
  7. faster_eth_utils/address.py +145 -0
  8. faster_eth_utils/applicators.cp310-win32.pyd +0 -0
  9. faster_eth_utils/applicators.py +209 -0
  10. faster_eth_utils/conversions.cp310-win32.pyd +0 -0
  11. faster_eth_utils/conversions.py +191 -0
  12. faster_eth_utils/crypto.cp310-win32.pyd +0 -0
  13. faster_eth_utils/crypto.py +21 -0
  14. faster_eth_utils/currency.cp310-win32.pyd +0 -0
  15. faster_eth_utils/currency.py +141 -0
  16. faster_eth_utils/curried/__init__.py +306 -0
  17. faster_eth_utils/debug.cp310-win32.pyd +0 -0
  18. faster_eth_utils/debug.py +20 -0
  19. faster_eth_utils/decorators.cp310-win32.pyd +0 -0
  20. faster_eth_utils/decorators.py +119 -0
  21. faster_eth_utils/encoding.cp310-win32.pyd +0 -0
  22. faster_eth_utils/encoding.py +6 -0
  23. faster_eth_utils/exceptions.cp310-win32.pyd +0 -0
  24. faster_eth_utils/exceptions.py +11 -0
  25. faster_eth_utils/functional.cp310-win32.pyd +0 -0
  26. faster_eth_utils/functional.py +89 -0
  27. faster_eth_utils/hexadecimal.cp310-win32.pyd +0 -0
  28. faster_eth_utils/hexadecimal.py +80 -0
  29. faster_eth_utils/humanize.cp310-win32.pyd +0 -0
  30. faster_eth_utils/humanize.py +201 -0
  31. faster_eth_utils/logging.py +146 -0
  32. faster_eth_utils/module_loading.cp310-win32.pyd +0 -0
  33. faster_eth_utils/module_loading.py +31 -0
  34. faster_eth_utils/network.cp310-win32.pyd +0 -0
  35. faster_eth_utils/network.py +92 -0
  36. faster_eth_utils/numeric.cp310-win32.pyd +0 -0
  37. faster_eth_utils/numeric.py +43 -0
  38. faster_eth_utils/py.typed +0 -0
  39. faster_eth_utils/pydantic.py +101 -0
  40. faster_eth_utils/toolz.cp310-win32.pyd +0 -0
  41. faster_eth_utils/toolz.py +84 -0
  42. faster_eth_utils/types.cp310-win32.pyd +0 -0
  43. faster_eth_utils/types.py +68 -0
  44. faster_eth_utils/typing/__init__.py +18 -0
  45. faster_eth_utils/typing/misc.py +14 -0
  46. faster_eth_utils/units.cp310-win32.pyd +0 -0
  47. faster_eth_utils/units.py +31 -0
  48. faster_eth_utils-5.3.19.dist-info/METADATA +193 -0
  49. faster_eth_utils-5.3.19.dist-info/RECORD +53 -0
  50. faster_eth_utils-5.3.19.dist-info/WHEEL +5 -0
  51. faster_eth_utils-5.3.19.dist-info/licenses/LICENSE +21 -0
  52. faster_eth_utils-5.3.19.dist-info/top_level.txt +3 -0
  53. faster_eth_utils__mypyc.cp310-win32.pyd +0 -0
faster_eth_utils/hexadecimal.py
@@ -0,0 +1,80 @@
+ # String encodings and numeric representations
+
+ import binascii
+ import re
+ from typing import (
+     Any,
+     AnyStr,
+     Final,
+     Union,
+ )
+
+ from eth_typing import (
+     HexStr,
+ )
+ from typing_extensions import (
+     TypeGuard,
+ )
+
+ _HEX_REGEXP_MATCH: Final = re.compile("(0[xX])?[0-9a-fA-F]*").fullmatch
+
+ hexlify: Final = binascii.hexlify
+ unhexlify: Final = binascii.unhexlify
+
+
+
+ def decode_hex(value: str) -> bytes:
+     if not isinstance(value, str):
+         raise TypeError("Value must be an instance of str")
+     non_prefixed = remove_0x_prefix(HexStr(value))
+     # unhexlify will only accept bytes type someday
+     ascii_hex = non_prefixed.encode("ascii")
+     return unhexlify(ascii_hex)
+
+
+ def encode_hex(value: AnyStr) -> HexStr:
+     ascii_bytes: Union[bytes, bytearray]
+     if isinstance(value, (bytes, bytearray)):
+         ascii_bytes = value
+     elif isinstance(value, str):
+         ascii_bytes = value.encode("ascii")
+     else:
+         raise TypeError("Value must be an instance of str or unicode")
+
+     binary_hex = hexlify(ascii_bytes)
+     return add_0x_prefix(HexStr(binary_hex.decode("ascii")))
+
+
+ def is_0x_prefixed(value: str) -> bool:
+     # this check is not needed in the compiled version
+     # if not isinstance(value, str):
+     #     raise TypeError(
+     #         f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
+     #     )
+     return value.startswith(("0x", "0X"))
+
+
+ def remove_0x_prefix(value: HexStr) -> HexStr:
+     if is_0x_prefixed(value):
+         return HexStr(value[2:])
+     return value
+
+
+ def add_0x_prefix(value: HexStr) -> HexStr:
+     if is_0x_prefixed(value):
+         return value
+     return HexStr("0x" + value)
+
+
+ def is_hexstr(value: Any) -> TypeGuard[HexStr]:
+     if not isinstance(value, str) or not value:
+         return False
+     return _HEX_REGEXP_MATCH(value) is not None
+
+
+ def is_hex(value: Any) -> TypeGuard[HexStr]:
+     if not isinstance(value, str):
+         raise TypeError(f"is_hex requires text typed arguments. Got: {repr(value)}")
+     if not value:
+         return False
+     return _HEX_REGEXP_MATCH(value) is not None
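
A quick sanity check of the hex helpers above; a minimal sketch assuming the wheel is installed and importable as faster_eth_utils:

    from faster_eth_utils.hexadecimal import (
        decode_hex,
        encode_hex,
        is_hex,
        is_hexstr,
    )

    # encode_hex accepts bytes/bytearray or an ASCII str and always 0x-prefixes
    assert encode_hex(b"\xde\xad\xbe\xef") == "0xdeadbeef"

    # decode_hex tolerates a missing 0x prefix
    assert decode_hex("0xdeadbeef") == decode_hex("deadbeef") == b"\xde\xad\xbe\xef"

    # is_hexstr returns False for non-str input; is_hex raises TypeError instead
    assert is_hexstr("0x1234") and not is_hexstr(1234)
    assert is_hex("0X1234")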
faster_eth_utils/humanize.py
@@ -0,0 +1,201 @@
+ from typing import (
+     Any,
+     Final,
+     Iterable,
+     Iterator,
+     Tuple,
+     Union,
+ )
+ from urllib import (
+     parse,
+ )
+
+ from eth_typing import (
+     URI,
+     Hash32,
+ )
+
+ from faster_eth_utils.currency import (
+     denoms,
+     from_wei,
+ )
+
+ from . import toolz
+
+
+ def humanize_seconds(seconds: Union[float, int]) -> str:
+     if int(seconds) == 0:
+         return "0s"
+
+     unit_values = _consume_leading_zero_units(_humanize_seconds(int(seconds)))
+
+     return "".join((f"{amount}{unit}" for amount, unit in toolz.take(3, unit_values)))
+
+
+ SECOND: Final = 1
+ MINUTE: Final = 60
+ HOUR: Final = 60 * 60
+ DAY: Final = 24 * HOUR
+ YEAR: Final = 365 * DAY
+ MONTH: Final = YEAR // 12
+ WEEK: Final = 7 * DAY
+
+
+ UNITS: Final = (
+     (YEAR, "y"),
+     (MONTH, "m"),
+     (WEEK, "w"),
+     (DAY, "d"),
+     (HOUR, "h"),
+     (MINUTE, "m"),
+     (SECOND, "s"),
+ )
+
+
+ def _consume_leading_zero_units(
+     units_iter: Iterator[Tuple[int, str]]
+ ) -> Iterator[Tuple[int, str]]:
+     for amount, unit in units_iter:
+         if amount == 0:
+             continue
+         else:
+             yield (amount, unit)
+             break
+
+     yield from units_iter
+
+
+ def _humanize_seconds(seconds: int) -> Iterator[Tuple[int, str]]:
+     remainder = seconds
+
+     for duration, unit in UNITS:
+         if not remainder:
+             break
+
+         num = remainder // duration
+         yield num, unit
+
+         remainder %= duration
+
+
+ DISPLAY_HASH_CHARS: Final = 4
+
+
+ def humanize_bytes(value: bytes) -> str:
+     if len(value) <= DISPLAY_HASH_CHARS + 1:
+         return value.hex()
+     value_as_hex = value.hex()
+     head = value_as_hex[:DISPLAY_HASH_CHARS]
+     tail = value_as_hex[-1 * DISPLAY_HASH_CHARS :]
+     return f"{head}..{tail}"
+
+
+ def humanize_hexstr(value: str) -> str:
+     tail = value[-1 * DISPLAY_HASH_CHARS :]
+
+     if value[:2] == "0x":
+         if len(value[2:]) <= DISPLAY_HASH_CHARS * 2:
+             return value
+         head = value[2 : DISPLAY_HASH_CHARS + 2]
+         return f"0x{head}..{tail}"
+     else:
+         if len(value) <= DISPLAY_HASH_CHARS * 2:
+             return value
+         head = value[:DISPLAY_HASH_CHARS]
+         return f"{head}..{tail}"
+
+
+ def humanize_hash(value: Hash32) -> str:
+     return humanize_bytes(value)
+
+
+ def humanize_ipfs_uri(uri: URI) -> str:
+     if not is_ipfs_uri(uri):
+         raise TypeError(
+             f"{uri} does not look like a valid IPFS uri. Currently, "
+             "only CIDv0 hash schemes are supported."
+         )
+
+     parsed = parse.urlparse(uri)
+     ipfs_hash = parsed.netloc
+     head = ipfs_hash[:DISPLAY_HASH_CHARS]
+     tail = ipfs_hash[-1 * DISPLAY_HASH_CHARS :]
+     return f"ipfs://{head}..{tail}"
+
+
+ def is_ipfs_uri(value: Any) -> bool:
+     if not isinstance(value, str):
+         return False
+
+     parsed = parse.urlparse(value)
+     if parsed.scheme != "ipfs" or not parsed.netloc:
+         return False
+
+     return _is_CIDv0_ipfs_hash(parsed.netloc)
+
+
+ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
+     if ipfs_hash.startswith("Qm") and len(ipfs_hash) == 46:
+         return True
+     return False
+
+
+ def _find_breakpoints(values: Tuple[int, ...]) -> Iterator[int]:
+     yield 0
+     for index, (left, right) in enumerate(toolz.sliding_window(2, values), 1):
+         if left + 1 == right:
+             continue
+         else:
+             yield index
+     yield len(values)
+
+
+ def _extract_integer_ranges(values: Tuple[int, ...]) -> Iterator[Tuple[int, int]]:
+     """
+     Return a tuple of consecutive ranges of integers.
+
+     :param values: a sequence of ordered integers
+
+     - fn(1, 2, 3) -> ((1, 3),)
+     - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
+     - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
+     """
+     for left, right in toolz.sliding_window(2, _find_breakpoints(values)):
+         chunk = values[left:right]
+         yield chunk[0], chunk[-1]
+
+
+ def _humanize_range(bounds: Tuple[int, int]) -> str:
+     left, right = bounds
+     if left == right:
+         return str(left)
+     else:
+         return f"{left}-{right}"
+
+
+ def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
+     """
+     Return a concise, human-readable string representing a sequence of integers.
+
+     - fn((1, 2, 3)) -> '1-3'
+     - fn((1, 2, 3, 7, 8, 9)) -> '1-3|7-9'
+     - fn((1, 2, 3, 5, 7, 8, 9)) -> '1-3|5|7-9'
+     - fn((1, 7, 8, 9)) -> '1|7-9'
+     """
+     values = tuple(values_iter)
+     if not values:
+         return "(empty)"
+     else:
+         return "|".join(_humanize_range(range) for range in _extract_integer_ranges(values))
+
+
+ def humanize_wei(number: int) -> str:
+     if number >= denoms.finney:
+         unit = "ether"
+     elif number >= denoms.mwei:
+         unit = "gwei"
+     else:
+         unit = "wei"
+     amount = from_wei(number, unit)
+     x = f"{amount} {unit}"
+     return x
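
Expected behavior of the humanize helpers above, sketched as assertions (assuming the wheel is installed; the exact strings follow from the code in this hunk):

    from faster_eth_utils.humanize import (
        humanize_bytes,
        humanize_integer_sequence,
        humanize_seconds,
        humanize_wei,
    )

    # at most three units are shown, after leading zero units are dropped
    assert humanize_seconds(3605) == "1h0m5s"
    assert humanize_seconds(0) == "0s"

    # long byte strings are elided to a head..tail preview
    assert humanize_bytes(bytes(range(32))) == "0001..1e1f"

    # consecutive runs collapse to ranges joined by "|"
    assert humanize_integer_sequence((1, 2, 3, 7, 8, 9)) == "1-3|7-9"

    # unit chosen by threshold: >= denoms.finney prints in ether,
    # >= denoms.mwei in gwei, otherwise wei
    assert humanize_wei(10**18) == "1 ether"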
faster_eth_utils/logging.py
@@ -0,0 +1,146 @@
+ import contextlib
+ from functools import (
+     cached_property,
+ )
+ import logging
+ from typing import (
+     Any,
+     Dict,
+     Iterator,
+     Tuple,
+     Type,
+     TypeVar,
+     Union,
+     cast,
+     overload,
+ )
+
+ from .toolz import (
+     assoc,
+ )
+
+ DEBUG2_LEVEL_NUM = 8
+
+ TLogger = TypeVar("TLogger", bound=logging.Logger)
+
+
+ class ExtendedDebugLogger(logging.Logger):
+     """
+     Logging class that can be used for lower level debug logging.
+     """
+
+     @cached_property
+     def show_debug2(self) -> bool:
+         return self.isEnabledFor(DEBUG2_LEVEL_NUM)
+
+     def debug2(self, message: str, *args: Any, **kwargs: Any) -> None:
+         if self.show_debug2:
+             self.log(DEBUG2_LEVEL_NUM, message, *args, **kwargs)
+         else:
+             # When we find that `DEBUG2` isn't enabled we completely replace
+             # the `debug2` function in this instance of the logger with a noop
+             # lambda to further speed up
+             self.__dict__["debug2"] = lambda message, *args, **kwargs: None
+
+     def __reduce__(self) -> Tuple[Any, ...]:
+         # This is needed because our parent's implementation could
+         # cause us to become a regular Logger on unpickling.
+         return get_extended_debug_logger, (self.name,)
+
+
+ def setup_DEBUG2_logging() -> None:
+     """
+     Installs the `DEBUG2` logging level in the main logging module.
+     """
+     if not hasattr(logging, "DEBUG2"):
+         logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
+         logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]
+
+ @contextlib.contextmanager
+ def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
+     original_logger_class = logging.getLoggerClass()
+     logging.setLoggerClass(logger_class)
+     try:
+         yield
+     finally:
+         logging.setLoggerClass(original_logger_class)
+
+
+ @overload
+ def get_logger(name: str, logger_class: Type[TLogger]) -> TLogger: ...
+ @overload
+ def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
+ def get_logger(name: str, logger_class: Union[Type[TLogger], None] = None) -> Union[TLogger, logging.Logger]:
+     if logger_class is None:
+         return logging.getLogger(name)
+
+     with _use_logger_class(logger_class):
+         # The logging module caches logger instances. The following code
+         # ensures that if there is a cached instance that we don't
+         # accidentally return the incorrect logger type because the logging
+         # module does not *update* the cached instance in the event that
+         # the global logging class changes.
+         manager = logging.Logger.manager
+         logger_dict = manager.loggerDict
+         cached_logger = logger_dict.get(name)
+         if cached_logger is not None and type(cached_logger) is not logger_class:
+             del logger_dict[name]
+         return cast(TLogger, logging.getLogger(name))
+
+
+ def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
+     return get_logger(name, ExtendedDebugLogger)
+
+
+ THasLoggerMeta = TypeVar("THasLoggerMeta", bound="HasLoggerMeta")
+
+
+ class HasLoggerMeta(type):
+     """
+     Assigns a logger instance to a class, derived from the import path and name.
+
+     This metaclass uses `__qualname__` to identify a unique and meaningful name
+     to use when creating the associated logger for a given class.
+     """
+
+     logger_class = logging.Logger
+
+     def __new__(
+         mcls: Type[THasLoggerMeta],
+         name: str,
+         bases: Tuple[Type[Any]],
+         namespace: Dict[str, Any],
+     ) -> THasLoggerMeta:
+         if "logger" in namespace:
+             # If a logger was explicitly declared we shouldn't do anything to
+             # replace it.
+             return super().__new__(mcls, name, bases, namespace)
+         if "__qualname__" not in namespace:
+             raise AttributeError("Missing __qualname__")
+
+         logger = get_logger(namespace["__qualname__"], mcls.logger_class)
+
+         return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))
+
+     @classmethod
+     def replace_logger_class(
+         mcls: Type[THasLoggerMeta], value: Type[logging.Logger]
+     ) -> Type[THasLoggerMeta]:
+         return type(mcls.__name__, (mcls,), {"logger_class": value})
+
+     @classmethod
+     def meta_compat(
+         mcls: Type[THasLoggerMeta], other: Type[type]
+     ) -> Type[THasLoggerMeta]:
+         return type(mcls.__name__, (mcls, other), {})
+
+
+ class HasLogger(metaclass=HasLoggerMeta):
+     logger: logging.Logger
+
+
+ HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)
+
+
+ class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore [metaclass,misc]
+     logger: ExtendedDebugLogger
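
How these pieces fit together, as a minimal hypothetical sketch (the logger name and class are made up for illustration):

    import logging

    from faster_eth_utils.logging import (
        HasLogger,
        get_extended_debug_logger,
        setup_DEBUG2_logging,
    )

    setup_DEBUG2_logging()  # registers level 8 under the name "DEBUG2"

    logger = get_extended_debug_logger("my.app")  # "my.app" is a hypothetical name
    logger.setLevel(logging.DEBUG2)  # type: ignore [attr-defined]
    logger.debug2("emitted only while DEBUG2 (level 8) is enabled")

    class MyService(HasLogger):  # hypothetical class; logger injected by the metaclass
        pass

    assert MyService.logger.name == "MyService"  # derived from __qualname__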
faster_eth_utils/module_loading.py
@@ -0,0 +1,31 @@
+ from importlib import (
+     import_module,
+ )
+ from typing import (
+     Any,
+ )
+
+
+ def import_string(dotted_path: str) -> Any:
+     """
+     Import a variable using its path and name.
+
+     :param dotted_path: dotted module path and variable/class name
+     :return: the attribute/class designated by the last name in the path
+     :raise: ImportError, if the import failed
+
+     Source: django.utils.module_loading
+     """
+     try:
+         module_path, class_name = dotted_path.rsplit(".", 1)
+     except ValueError:
+         msg = f"{dotted_path} doesn't look like a module path"
+         raise ImportError(msg)
+
+     module = import_module(module_path)
+
+     try:
+         return getattr(module, class_name)
+     except AttributeError:
+         msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
+         raise ImportError(msg)
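
Usage mirrors Django's import_string; a short sketch assuming the wheel is installed:

    import os.path

    from faster_eth_utils.module_loading import import_string

    # resolves "module.path.attr" to the attribute itself
    assert import_string("os.path.join") is os.path.join

    # a path without a dot raises ImportError rather than ValueError
    try:
        import_string("nodots")
    except ImportError as exc:
        print(exc)  # nodots doesn't look like a module path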
faster_eth_utils/network.cp310-win32.pyd: Binary file
faster_eth_utils/network.py
@@ -0,0 +1,92 @@
+ from dataclasses import (
+     dataclass,
+ )
+ import json
+ import os
+ import sys
+ from pathlib import (
+     Path,
+ )
+ from typing import (
+     Final,
+     List,
+ )
+
+ from eth_typing import (
+     ChainId,
+ )
+
+ from faster_eth_utils import (
+     ValidationError,
+ )
+
+
+ FASTER_ETH_UTILS_FOLDER: Final = Path(sys.modules["faster_eth_utils"].__file__).parent  # type: ignore [arg-type]
+
+
+ @dataclass
+ class Network:
+     chain_id: int
+     name: str
+     shortName: str
+     symbol: ChainId
+
+
+ def initialize_network_objects() -> List[Network]:
+     networks_obj = []
+
+     networks_json_path = os.path.abspath(
+         os.path.join(str(FASTER_ETH_UTILS_FOLDER), "__json")
+     )
+     with open(
+         os.path.join(networks_json_path, "eth_networks.json"),
+         encoding="UTF-8",
+     ) as open_file:
+         network_data = json.load(open_file)
+
+     for entry in network_data:
+         try:
+             network = Network(
+                 chain_id=entry["chainId"],
+                 name=entry["name"],
+                 shortName=entry["shortName"],
+                 symbol=ChainId(entry["chainId"]),
+             )
+             networks_obj.append(network)
+         except ValueError:
+             # Chain does not have a valid ChainId; network files in eth-utils and
+             # eth-typing should be updated. Run `python update_networks.py` in the
+             # project root.
+             pass
+
+     return networks_obj
+
+
+ networks = initialize_network_objects()
+
+ networks_by_id = {network.chain_id: network for network in networks}
+ network_names_by_id = {network.chain_id: network.name for network in networks}
+ network_short_names_by_id = {
+     network.chain_id: network.shortName for network in networks
+ }
+
+
+ def network_from_chain_id(chain_id: int) -> Network:
+     try:
+         return networks_by_id[chain_id]
+     except KeyError:
+         raise ValidationError(f"chain_id is not recognized: {chain_id}")
+
+
+ def name_from_chain_id(chain_id: int) -> str:
+     try:
+         return network_names_by_id[chain_id]
+     except KeyError:
+         raise ValidationError(f"chain_id is not recognized: {chain_id}")
+
+
+ def short_name_from_chain_id(chain_id: int) -> str:
+     try:
+         return network_short_names_by_id[chain_id]
+     except KeyError:
+         raise ValidationError(f"chain_id is not recognized: {chain_id}")
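
A short sketch of the lookup helpers. The exact strings come from the bundled eth_networks.json; the values for chain id 1 shown in the comment assume it follows the usual chainlist data:

    from faster_eth_utils import ValidationError
    from faster_eth_utils.network import (
        name_from_chain_id,
        network_from_chain_id,
    )

    net = network_from_chain_id(1)
    print(net.name, net.shortName)  # e.g. "Ethereum Mainnet" / "eth" per chainlist

    # unknown chain ids raise ValidationError rather than KeyError
    try:
        name_from_chain_id(-1)
    except ValidationError as exc:
        print(exc)  # chain_id is not recognized: -1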
faster_eth_utils/numeric.cp310-win32.pyd: Binary file
faster_eth_utils/numeric.py
@@ -0,0 +1,43 @@
+ from abc import (
+     ABC,
+     abstractmethod,
+ )
+ import decimal
+ import numbers
+ from typing import (
+     Any,
+     TypeVar,
+     Union,
+ )
+
+
+ class Comparable(ABC):
+     @abstractmethod
+     def __lt__(self, other: Any) -> bool:
+         ...
+
+     @abstractmethod
+     def __gt__(self, other: Any) -> bool:
+         ...
+
+
+ TComparable = Union[Comparable, numbers.Real, int, float, decimal.Decimal]
+
+
+ TValue = TypeVar("TValue", bound=TComparable)
+
+
+ def clamp(lower_bound: TValue, upper_bound: TValue, value: TValue) -> TValue:
+     # The `mypy` ignore statements here are due to doing a comparison of
+     # `Union` types which isn't allowed. (per cburgdorf). This approach was
+     # chosen over using `typing.overload` to define multiple signatures for
+     # each comparison type here since the added value of "proper" typing
+     # doesn't seem to justify the complexity of having a bunch of different
+     # signatures defined. The external library perspective on this function
+     # should still be adequate under this approach
+     if value < lower_bound:  # type: ignore
+         return lower_bound
+     elif value > upper_bound:  # type: ignore
+         return upper_bound
+     else:
+         return value
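
Note the argument order is (lower_bound, upper_bound, value), which is easy to misread; a quick sketch:

    from decimal import Decimal

    from faster_eth_utils.numeric import clamp

    assert clamp(0, 10, -5) == 0    # below the range -> lower bound
    assert clamp(0, 10, 5) == 5     # inside the range -> unchanged
    assert clamp(0, 10, 15) == 10   # above the range -> upper bound

    # works with any mutually comparable TComparable types
    assert clamp(Decimal("0.1"), Decimal("0.9"), Decimal("1.5")) == Decimal("0.9")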
faster_eth_utils/py.typed: File without changes
faster_eth_utils/pydantic.py
@@ -0,0 +1,101 @@
+ from typing import (
+     Any,
+     Dict,
+     Type,
+ )
+
+ from pydantic import (
+     BaseModel,
+     ConfigDict,
+ )
+ from pydantic._internal._core_utils import (
+     CoreSchemaField,
+ )
+ from pydantic.alias_generators import (
+     to_camel,
+ )
+ from pydantic.json_schema import (
+     DEFAULT_REF_TEMPLATE,
+     GenerateJsonSchema,
+     JsonSchemaMode,
+ )
+
+
+ class OmitJsonSchema(GenerateJsonSchema):
+     """
+     Custom JSON schema generator that omits the schema generation for fields that are
+     invalid. Excluded fields (``Field(exclude=True)``) are generally useful as
+     properties of the model but are not meant to be serialized to JSON.
+     """
+
+     def field_is_present(self, field: CoreSchemaField) -> bool:
+         # override ``field_is_present`` and omit excluded fields from the schema
+         if field.get("serialization_exclude", False):
+             return False
+         return super().field_is_present(field)
+
+
+ class CamelModel(BaseModel):
+     """
+     Camel-case pydantic model. This model is used to ensure serialization in a
+     consistent manner, aliasing as camelCase serialization. This is useful for models
+     that are used in JSON-RPC requests and responses, marking useful fields for the
+     model, but that are not part of the JSON-RPC object, with ``Field(exclude=True)``.
+     To serialize a model to the expected JSON-RPC format, or camelCase, use
+     ``model_dump(by_alias=True)``.
+
+     .. code-block:: python
+
+         >>> from eth_utils.pydantic import CamelModel
+         >>> from pydantic import Field
+
+         >>> class SignedSetCodeAuthorization(CamelModel):
+         ...     chain_id: int
+         ...     address: bytes
+         ...     nonce: int
+         ...
+         ...     # useful fields for the object but excluded from serialization
+         ...     # (not part of the JSON-RPC object)
+         ...     authorization_hash: bytes = Field(exclude=True)
+         ...     signature: bytes = Field(exclude=True)
+
+         >>> auth = SignedSetCodeAuthorization(
+         ...     chain_id=1,
+         ...     address=b"0x0000000000000000000000000000000000000000",
+         ...     nonce=0,
+         ...     authorization_hash=generated_hash,
+         ...     signature=generated_signature,
+         ... )
+         >>> auth.model_dump(by_alias=True)
+         {'chainId': 1, 'address': '0x000000000000000000000000000000000000', 'nonce': 0}
+     """
+
+     model_config = ConfigDict(
+         arbitrary_types_allowed=True,
+         # populate by snake_case (python) args
+         populate_by_name=True,
+         # serialize by camelCase (json-rpc) keys
+         alias_generator=to_camel,
+         # validate default values
+         validate_default=True,
+     )
+
+     @classmethod
+     def model_json_schema(  # type: ignore [override]
+         cls,
+         by_alias: bool = True,
+         ref_template: str = DEFAULT_REF_TEMPLATE,
+         # default to ``OmitJsonSchema`` to prevent errors from excluded fields
+         schema_generator: Type[GenerateJsonSchema] = OmitJsonSchema,
+         mode: JsonSchemaMode = "validation",
+     ) -> Dict[str, Any]:
+         """
+         Omits excluded fields from the JSON schema, preventing errors that would
+         otherwise be raised by the default schema generator.
+         """
+         return super().model_json_schema(
+             by_alias=by_alias,
+             ref_template=ref_template,
+             schema_generator=schema_generator,
+             mode=mode,
+         )
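
A runnable variant of the docstring example; FeeSnapshot is a made-up model for illustration, and the schema assertion reflects what OmitJsonSchema should do per the code above:

    from pydantic import Field

    from faster_eth_utils.pydantic import CamelModel

    class FeeSnapshot(CamelModel):  # hypothetical model
        oldest_block: int
        base_fee_per_gas: int
        cache_key: str = Field(default="", exclude=True)  # not serialized

    snap = FeeSnapshot(oldest_block=1, base_fee_per_gas=7)

    # snake_case construction, camelCase serialization; excluded field omitted
    assert snap.model_dump(by_alias=True) == {"oldestBlock": 1, "baseFeePerGas": 7}

    # schema generation succeeds because OmitJsonSchema skips excluded fields
    schema = FeeSnapshot.model_json_schema()
    assert "cacheKey" not in schema["properties"]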