faster-eth-utils 5.3.21__cp310-cp310-musllinux_1_2_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. faster_eth_utils/__init__.py +144 -0
  2. faster_eth_utils/__json/eth_networks.json +1 -0
  3. faster_eth_utils/__main__.py +5 -0
  4. faster_eth_utils/abi.cpython-310-x86_64-linux-gnu.so +0 -0
  5. faster_eth_utils/abi.py +868 -0
  6. faster_eth_utils/address.cpython-310-x86_64-linux-gnu.so +0 -0
  7. faster_eth_utils/address.py +138 -0
  8. faster_eth_utils/applicators.cpython-310-x86_64-linux-gnu.so +0 -0
  9. faster_eth_utils/applicators.py +196 -0
  10. faster_eth_utils/conversions.cpython-310-x86_64-linux-gnu.so +0 -0
  11. faster_eth_utils/conversions.py +189 -0
  12. faster_eth_utils/crypto.cpython-310-x86_64-linux-gnu.so +0 -0
  13. faster_eth_utils/crypto.py +16 -0
  14. faster_eth_utils/currency.cpython-310-x86_64-linux-gnu.so +0 -0
  15. faster_eth_utils/currency.py +144 -0
  16. faster_eth_utils/curried/__init__.py +297 -0
  17. faster_eth_utils/debug.cpython-310-x86_64-linux-gnu.so +0 -0
  18. faster_eth_utils/debug.py +20 -0
  19. faster_eth_utils/decorators.cpython-310-x86_64-linux-gnu.so +0 -0
  20. faster_eth_utils/decorators.py +115 -0
  21. faster_eth_utils/encoding.cpython-310-x86_64-linux-gnu.so +0 -0
  22. faster_eth_utils/encoding.py +6 -0
  23. faster_eth_utils/exceptions.cpython-310-x86_64-linux-gnu.so +0 -0
  24. faster_eth_utils/exceptions.py +11 -0
  25. faster_eth_utils/functional.cpython-310-x86_64-linux-gnu.so +0 -0
  26. faster_eth_utils/functional.py +87 -0
  27. faster_eth_utils/hexadecimal.cpython-310-x86_64-linux-gnu.so +0 -0
  28. faster_eth_utils/hexadecimal.py +76 -0
  29. faster_eth_utils/humanize.cpython-310-x86_64-linux-gnu.so +0 -0
  30. faster_eth_utils/humanize.py +201 -0
  31. faster_eth_utils/logging.py +152 -0
  32. faster_eth_utils/module_loading.cpython-310-x86_64-linux-gnu.so +0 -0
  33. faster_eth_utils/module_loading.py +31 -0
  34. faster_eth_utils/network.cpython-310-x86_64-linux-gnu.so +0 -0
  35. faster_eth_utils/network.py +91 -0
  36. faster_eth_utils/numeric.cpython-310-x86_64-linux-gnu.so +0 -0
  37. faster_eth_utils/numeric.py +43 -0
  38. faster_eth_utils/py.typed +0 -0
  39. faster_eth_utils/pydantic.py +103 -0
  40. faster_eth_utils/toolz.cpython-310-x86_64-linux-gnu.so +0 -0
  41. faster_eth_utils/toolz.py +84 -0
  42. faster_eth_utils/types.cpython-310-x86_64-linux-gnu.so +0 -0
  43. faster_eth_utils/types.py +65 -0
  44. faster_eth_utils/typing/__init__.py +18 -0
  45. faster_eth_utils/typing/misc.py +14 -0
  46. faster_eth_utils/units.cpython-310-x86_64-linux-gnu.so +0 -0
  47. faster_eth_utils/units.py +31 -0
  48. faster_eth_utils-5.3.21.dist-info/METADATA +192 -0
  49. faster_eth_utils-5.3.21.dist-info/RECORD +53 -0
  50. faster_eth_utils-5.3.21.dist-info/WHEEL +5 -0
  51. faster_eth_utils-5.3.21.dist-info/licenses/LICENSE +21 -0
  52. faster_eth_utils-5.3.21.dist-info/top_level.txt +3 -0
  53. faster_eth_utils__mypyc.cpython-310-x86_64-linux-gnu.so +0 -0
@@ -0,0 +1,201 @@
1
+ from collections.abc import (
2
+ Iterable,
3
+ Iterator,
4
+ )
5
+ from typing import (
6
+ Any,
7
+ Final,
8
+ )
9
+ from urllib import (
10
+ parse,
11
+ )
12
+
13
+ from eth_typing import (
14
+ URI,
15
+ Hash32,
16
+ )
17
+
18
+ from faster_eth_utils.currency import (
19
+ denoms,
20
+ from_wei,
21
+ )
22
+
23
+ from . import toolz
24
+
25
+
26
def humanize_seconds(seconds: float | int) -> str:
    """Render a duration as a compact string such as ``'1h30m5s'``.

    Fractional seconds are truncated, and at most the three most
    significant non-zero units are shown.
    """
    whole_seconds = int(seconds)
    if not whole_seconds:
        return "0s"

    significant = _consume_leading_zero_units(_humanize_seconds(whole_seconds))
    parts = [f"{count}{suffix}" for count, suffix in toolz.take(3, significant)]
    return "".join(parts)
34
+
35
+
36
# Durations of each supported unit, expressed in seconds.
SECOND: Final = 1
MINUTE: Final = 60
HOUR: Final = 60 * 60
DAY: Final = 24 * HOUR
YEAR: Final = 365 * DAY
MONTH: Final = YEAR // 12
WEEK: Final = 7 * DAY


# (duration-in-seconds, display suffix) pairs ordered largest-first; consumed
# by ``_humanize_seconds``.
# NOTE(review): MONTH and MINUTE both use the suffix "m", so a rendered "m"
# is ambiguous — this matches the upstream eth-utils data, so it is kept.
UNITS: Final = (
    (YEAR, "y"),
    (MONTH, "m"),
    (WEEK, "w"),
    (DAY, "d"),
    (HOUR, "h"),
    (MINUTE, "m"),
    (SECOND, "s"),
)
54
+
55
+
56
+ def _consume_leading_zero_units(
57
+ units_iter: Iterator[tuple[int, str]]
58
+ ) -> Iterator[tuple[int, str]]:
59
+ for amount, unit in units_iter:
60
+ if amount == 0:
61
+ continue
62
+ else:
63
+ yield (amount, unit)
64
+ break
65
+
66
+ yield from units_iter
67
+
68
+
69
def _humanize_seconds(seconds: int) -> Iterator[tuple[int, str]]:
    """Yield (amount, unit-suffix) pairs for *seconds*, largest unit first.

    Iteration stops as soon as the remaining balance hits zero, so trailing
    zero-valued units are never produced.
    """
    balance = seconds

    for unit_seconds, suffix in UNITS:
        if not balance:
            break

        yield balance // unit_seconds, suffix
        balance %= unit_seconds
80
+
81
+
82
# Number of hex characters kept on each side when abbreviating hashes,
# hex strings, and IPFS hashes (e.g. "1234..abcd").
DISPLAY_HASH_CHARS: Final = 4
83
+
84
+
85
def humanize_bytes(value: bytes) -> str:
    """Return an abbreviated hex rendering of *value* (no ``0x`` prefix).

    Inputs of at most ``DISPLAY_HASH_CHARS + 1`` bytes come back as their
    full hex string; anything longer collapses to ``"head..tail"``.
    """
    hexed = value.hex()
    if len(value) <= DISPLAY_HASH_CHARS + 1:
        return hexed
    return f"{hexed[:DISPLAY_HASH_CHARS]}..{hexed[-DISPLAY_HASH_CHARS:]}"
92
+
93
+
94
def humanize_hexstr(value: str) -> str:
    """Abbreviate a hex string to ``head..tail``, preserving a ``0x`` prefix.

    Values short enough to display in full are returned unchanged.
    """
    tail = value[-DISPLAY_HASH_CHARS:]
    prefixed = value[:2] == "0x"
    digits = value[2:] if prefixed else value

    if len(digits) <= DISPLAY_HASH_CHARS * 2:
        return value

    head = digits[:DISPLAY_HASH_CHARS]
    return f"0x{head}..{tail}" if prefixed else f"{head}..{tail}"
107
+
108
+
109
def humanize_hash(value: Hash32) -> str:
    """Abbreviate a 32-byte hash; delegates to :func:`humanize_bytes`."""
    return humanize_bytes(value)
111
+
112
+
113
def humanize_ipfs_uri(uri: URI) -> str:
    """Abbreviate a CIDv0 ``ipfs://`` URI to ``ipfs://head..tail``.

    :raises TypeError: if *uri* does not pass :func:`is_ipfs_uri`.
    """
    if not is_ipfs_uri(uri):
        raise TypeError(
            f"{uri} does not look like a valid IPFS uri. Currently, "
            "only CIDv0 hash schemes are supported."
        )

    ipfs_hash = parse.urlparse(uri).netloc
    return (
        f"ipfs://{ipfs_hash[:DISPLAY_HASH_CHARS]}"
        f"..{ipfs_hash[-DISPLAY_HASH_CHARS:]}"
    )
125
+
126
+
127
def is_ipfs_uri(value: Any) -> bool:
    """Return True if *value* is an ``ipfs://`` URI carrying a CIDv0 hash."""
    if not isinstance(value, str):
        return False

    parsed = parse.urlparse(value)
    return (
        parsed.scheme == "ipfs"
        and bool(parsed.netloc)
        and _is_CIDv0_ipfs_hash(parsed.netloc)
    )
136
+
137
+
138
+ def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
139
+ if ipfs_hash.startswith("Qm") and len(ipfs_hash) == 46:
140
+ return True
141
+ return False
142
+
143
+
144
def _find_breakpoints(values: tuple[int, ...]) -> Iterator[int]:
    """Yield the index boundaries between runs of consecutive integers.

    Always yields ``0`` first and ``len(values)`` last; an index ``i`` is
    yielded in between whenever ``values[i - 1] + 1 != values[i]``.
    """
    yield 0
    for idx, (prev, curr) in enumerate(toolz.sliding_window(2, values), 1):
        if prev + 1 != curr:
            yield idx
    yield len(values)
152
+
153
+
154
def _extract_integer_ranges(values: tuple[int, ...]) -> Iterator[tuple[int, int]]:
    """
    Yield (first, last) bounds for each run of consecutive integers.

    :param values: a sequence of ordered integers

    - fn(1, 2, 3) -> ((1, 3),)
    - fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
    - fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
    """
    for start, stop in toolz.sliding_window(2, _find_breakpoints(values)):
        run = values[start:stop]
        yield run[0], run[-1]
167
+
168
+
169
+ def _humanize_range(bounds: tuple[int, int]) -> str:
170
+ left, right = bounds
171
+ if left == right:
172
+ return str(left)
173
+ else:
174
+ return f"{left}-{right}"
175
+
176
+
177
def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
    """
    Return a concise, human-readable string representing a sequence of integers.

    - fn((1, 2, 3)) -> '1-3'
    - fn((1, 2, 3, 7, 8, 9)) -> '1-3|7-9'
    - fn((1, 2, 3, 5, 7, 8, 9)) -> '1-3|5|7-9'
    - fn((1, 7, 8, 9)) -> '1|7-9'

    :param values_iter: an iterable of ordered integers
    :return: the abbreviated representation, or ``'(empty)'`` for no values
    """
    values = tuple(values_iter)
    if not values:
        return "(empty)"
    # The loop variable is named ``bounds`` (the original shadowed the
    # builtin ``range``).
    return "|".join(
        _humanize_range(bounds) for bounds in _extract_integer_ranges(values)
    )
191
+
192
+
193
def humanize_wei(number: int) -> str:
    """Render a wei amount using the largest convenient denomination.

    Amounts of at least one finney are shown in ether, amounts of at least
    one mwei in gwei, and everything smaller in wei.

    :param number: the amount in wei
    :return: e.g. ``'1.5 ether'``, ``'20 gwei'``, ``'7 wei'``
    """
    if number >= denoms.finney:
        unit = "ether"
    elif number >= denoms.mwei:
        unit = "gwei"
    else:
        unit = "wei"
    # The f-string formats the value directly; the original wrapped it in a
    # redundant str() call.
    return f"{from_wei(number, unit)} {unit}"
@@ -0,0 +1,152 @@
1
+ import logging
2
+ from collections.abc import (
3
+ Iterator,
4
+ )
5
+ from contextlib import (
6
+ contextmanager,
7
+ )
8
+ from functools import (
9
+ cached_property,
10
+ )
11
+ from typing import (
12
+ Any,
13
+ Final,
14
+ TypeVar,
15
+ cast,
16
+ overload,
17
+ )
18
+
19
+ from .toolz import (
20
+ assoc,
21
+ )
22
+
23
+ DEBUG2_LEVEL_NUM = 8
24
+
25
+ TLogger = TypeVar("TLogger", bound=logging.Logger)
26
+
27
+ Logger: Final = logging.Logger
28
+ getLogger: Final = logging.getLogger
29
+ getLoggerClass: Final = logging.getLoggerClass
30
+ setLoggerClass: Final = logging.setLoggerClass
31
+
32
+
33
class ExtendedDebugLogger(logging.Logger):
    """
    Logging class that can be used for lower level debug logging.
    """

    @cached_property
    def show_debug2(self) -> bool:
        # Cached on first access: once the effective level has been checked,
        # repeating isEnabledFor() on every debug2() call would be wasted work.
        # NOTE(review): because it is cached, a later level change is not
        # picked up by this instance.
        return self.isEnabledFor(DEBUG2_LEVEL_NUM)

    def debug2(self, message: str, *args: Any, **kwargs: Any) -> None:
        # Log *message* at the custom DEBUG2 level (numerically below DEBUG).
        if self.show_debug2:
            self.log(DEBUG2_LEVEL_NUM, message, *args, **kwargs)
        else:
            # When we find that `DEBUG2` isn't enabled we completely replace
            # the `debug2` function in this instance of the logger with a noop
            # lambda to further speed up
            self.__dict__["debug2"] = lambda message, *args, **kwargs: None

    def __reduce__(self) -> tuple[Any, ...]:
        # This is needed because our parent's implementation could
        # cause us to become a regular Logger on unpickling.
        return get_extended_debug_logger, (self.name,)
55
+
56
+
57
def setup_DEBUG2_logging() -> None:
    """
    Installs the `DEBUG2` level logging levels to the main logging module.

    Idempotent: calling it again after the level is registered is a no-op.
    """
    if hasattr(logging, "DEBUG2"):
        return
    logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
    logging.DEBUG2 = DEBUG2_LEVEL_NUM  # type: ignore [attr-defined]
64
+
65
@contextmanager
def _use_logger_class(logger_class: type[logging.Logger]) -> Iterator[None]:
    """Temporarily install *logger_class* as the global logger class.

    The previous class is restored on exit, even if the body raises.
    """
    previous = getLoggerClass()
    setLoggerClass(logger_class)
    try:
        yield
    finally:
        setLoggerClass(previous)
73
+
74
+
75
@overload
def get_logger(name: str, logger_class: type[TLogger]) -> TLogger: ...
@overload
def get_logger(name: str, logger_class: None = None) -> logging.Logger: ...
def get_logger(name: str, logger_class: type[TLogger] | None = None) -> TLogger | logging.Logger:
    """Return the logger named *name*, creating it as *logger_class* if given.

    The stdlib caches loggers by name and never retrofits a cached instance
    when the global logger class changes, so a cached logger of the wrong
    concrete type is evicted first to guarantee the requested class.
    """
    if logger_class is None:
        return getLogger(name)

    with _use_logger_class(logger_class):
        cached = Logger.manager.loggerDict.get(name)
        if cached is not None and type(cached) is not logger_class:
            del Logger.manager.loggerDict[name]
        return cast(TLogger, getLogger(name))
95
+
96
+
97
def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
    """Return (creating if necessary) an :class:`ExtendedDebugLogger` for *name*."""
    logger = get_logger(name, ExtendedDebugLogger)
    return logger
99
+
100
+
101
+ THasLoggerMeta = TypeVar("THasLoggerMeta", bound="HasLoggerMeta")
102
+
103
+
104
class HasLoggerMeta(type):
    """
    Assigns a logger instance to a class, derived from the import path and name.

    This metaclass uses `__qualname__` to identify a unique and meaningful name
    to use when creating the associated logger for a given class.
    """

    # Logger class instantiated for each new class; metaclasses produced by
    # ``replace_logger_class`` override this attribute.
    logger_class = Logger

    def __new__(
        mcls: type[THasLoggerMeta],
        name: str,
        bases: tuple[type[Any]],
        namespace: dict[str, Any],
    ) -> THasLoggerMeta:
        if "logger" in namespace:
            # If a logger was explicitly declared we shouldn't do anything to
            # replace it.
            return super().__new__(mcls, name, bases, namespace)
        if "__qualname__" not in namespace:
            raise AttributeError("Missing __qualname__")

        logger = get_logger(namespace["__qualname__"], mcls.logger_class)

        # ``assoc`` returns a copy of the namespace with "logger" added,
        # leaving the original mapping untouched.
        return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))

    @classmethod
    def replace_logger_class(
        mcls: type[THasLoggerMeta], value: type[logging.Logger]
    ) -> type[THasLoggerMeta]:
        # Derive a new metaclass identical to this one but whose classes get
        # loggers of type *value*.
        return type(mcls.__name__, (mcls,), {"logger_class": value})

    @classmethod
    def meta_compat(
        mcls: type[THasLoggerMeta], other: type[type]
    ) -> type[THasLoggerMeta]:
        # Combine this metaclass with *other* so a class can inherit from
        # bases that use a different metaclass without a metaclass conflict.
        return type(mcls.__name__, (mcls, other), {})
142
+
143
+
144
class HasLogger(metaclass=HasLoggerMeta):
    """Base class whose subclasses automatically receive a ``logger``
    attribute named after their ``__qualname__``."""

    # populated by HasLoggerMeta.__new__
    logger: logging.Logger
146
+
147
+
148
# Metaclass variant that attaches an ``ExtendedDebugLogger`` instead of a
# plain ``logging.Logger``.
HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)
149
+
150
+
151
class HasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta):  # type: ignore[metaclass]
    """Base class whose subclasses automatically receive an
    ``ExtendedDebugLogger`` as their ``logger`` attribute."""

    # populated by the metaclass
    logger: ExtendedDebugLogger
@@ -0,0 +1,31 @@
1
+ from importlib import (
2
+ import_module,
3
+ )
4
+ from typing import (
5
+ Any,
6
+ )
7
+
8
+
9
def import_string(dotted_path: str) -> Any:
    """
    Import a variable using its path and name.

    :param dotted_path: dotted module path and variable/class name
    :return: the attribute/class designated by the last name in the path
    :raise: ImportError, if the import failed

    Source: django.utils.module_loading
    """
    try:
        module_path, class_name = dotted_path.rsplit(".", 1)
    except ValueError as err:
        msg = f"{dotted_path} doesn't look like a module path"
        # chain the original error so tracebacks show the root cause
        raise ImportError(msg) from err

    module = import_module(module_path)

    try:
        return getattr(module, class_name)
    except AttributeError as err:
        msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
        raise ImportError(msg) from err
@@ -0,0 +1,91 @@
1
+ from dataclasses import (
2
+ dataclass,
3
+ )
4
+ import json
5
+ import os
6
+ import sys
7
+ from pathlib import (
8
+ Path,
9
+ )
10
+ from typing import (
11
+ Final,
12
+ )
13
+
14
+ from eth_typing import (
15
+ ChainId,
16
+ )
17
+
18
+ from faster_eth_utils.exceptions import (
19
+ ValidationError,
20
+ )
21
+
22
+
23
# Filesystem location of the installed ``faster_eth_utils`` package, used to
# locate bundled data files such as ``__json/eth_networks.json``.
FASTER_ETH_UTILS_FOLDER: Final = Path(sys.modules["faster_eth_utils"].__file__).parent  # type: ignore [arg-type]
24
+
25
+
26
@dataclass
class Network:
    """One known Ethereum network, loaded from ``__json/eth_networks.json``."""

    # numeric chain id as it appears in the JSON data
    chain_id: int
    # full human-readable network name
    name: str
    # abbreviated name; camelCase mirrors the JSON source field
    shortName: str
    # the eth_typing.ChainId enum member matching ``chain_id``
    symbol: ChainId
32
+
33
+
34
def initialize_network_objects() -> list[Network]:
    """Load the bundled ``eth_networks.json`` data into ``Network`` objects.

    Entries whose chain id has no matching :class:`~eth_typing.ChainId`
    member are skipped rather than raising.

    :return: one ``Network`` per recognized entry in the JSON file
    """
    # Build the path with pathlib (already used for FASTER_ETH_UTILS_FOLDER)
    # instead of os.path string manipulation.
    networks_json_path = FASTER_ETH_UTILS_FOLDER / "__json" / "eth_networks.json"
    with open(networks_json_path, encoding="UTF-8") as open_file:
        network_data = json.load(open_file)

    networks_obj = []
    for entry in network_data:
        try:
            networks_obj.append(
                Network(
                    chain_id=entry["chainId"],
                    name=entry["name"],
                    shortName=entry["shortName"],
                    symbol=ChainId(entry["chainId"]),
                )
            )
        except ValueError:
            # Chain does not have a valid ChainId; the network files in
            # eth-utils and eth-typing should be updated. Run
            # `python update_networks.py` in the project root.
            pass

    return networks_obj
62
+
63
+
64
# Eagerly load the bundled network registry at import time.
networks = initialize_network_objects()

# Lookup tables keyed by chain id, used by the ``*_from_chain_id`` helpers.
networks_by_id = {network.chain_id: network for network in networks}
network_names_by_id = {network.chain_id: network.name for network in networks}
network_short_names_by_id = {
    network.chain_id: network.shortName for network in networks
}
71
+
72
+
73
def network_from_chain_id(chain_id: int) -> Network:
    """Look up the :class:`Network` registered for *chain_id*.

    :raises ValidationError: if the chain id is unknown
    """
    network = networks_by_id.get(chain_id)
    if network is None:
        raise ValidationError(f"chain_id is not recognized: {chain_id}")
    return network
78
+
79
+
80
def name_from_chain_id(chain_id: int) -> str:
    """Return the full network name for *chain_id*.

    :raises ValidationError: if the chain id is unknown
    """
    if chain_id not in network_names_by_id:
        raise ValidationError(f"chain_id is not recognized: {chain_id}")
    return network_names_by_id[chain_id]
85
+
86
+
87
def short_name_from_chain_id(chain_id: int) -> str:
    """Return the abbreviated network name for *chain_id*.

    :raises ValidationError: if the chain id is unknown
    """
    short_name = network_short_names_by_id.get(chain_id)
    if short_name is None:
        raise ValidationError(f"chain_id is not recognized: {chain_id}")
    return short_name
@@ -0,0 +1,43 @@
1
+ from abc import (
2
+ ABC,
3
+ abstractmethod,
4
+ )
5
+ import decimal
6
+ import numbers
7
+ from typing import (
8
+ Any,
9
+ TypeVar,
10
+ Union,
11
+ )
12
+
13
+
14
class Comparable(ABC):
    """Abstract interface for objects supporting ``<`` and ``>`` comparisons,
    as required by :func:`clamp`."""

    @abstractmethod
    def __lt__(self, other: Any) -> bool:
        ...

    @abstractmethod
    def __gt__(self, other: Any) -> bool:
        ...
22
+
23
+
24
# Anything orderable: a user-defined Comparable or a stdlib numeric type.
TComparable = Union[Comparable, numbers.Real, int, float, decimal.Decimal]


# Type variable binding all three ``clamp`` arguments to one comparable type.
TValue = TypeVar("TValue", bound=TComparable)
28
+
29
+
30
def clamp(lower_bound: TValue, upper_bound: TValue, value: TValue) -> TValue:
    """Constrain *value* to the inclusive range [lower_bound, upper_bound]."""
    # The `mypy` ignore statements here are due to doing a comparison of
    # `Union` types which isn't allowed. (per cburgdorf). This approach was
    # chosen over using `typing.overload` to define multiple signatures for
    # each comparison type here since the added value of "proper" typing
    # doesn't seem to justify the complexity of having a bunch of different
    # signatures defined. The external library perspective on this function
    # should still be adequate under this approach
    if value < lower_bound:  # type: ignore
        result = lower_bound
    elif value > upper_bound:  # type: ignore
        result = upper_bound
    else:
        result = value
    return result
File without changes
@@ -0,0 +1,103 @@
1
+ from typing import (
2
+ Any,
3
+ cast,
4
+ )
5
+
6
+ from pydantic import (
7
+ BaseModel,
8
+ ConfigDict,
9
+ )
10
+ from pydantic._internal._core_utils import (
11
+ CoreSchemaField,
12
+ )
13
+ from pydantic.alias_generators import (
14
+ to_camel,
15
+ )
16
+ from pydantic.json_schema import (
17
+ DEFAULT_REF_TEMPLATE,
18
+ GenerateJsonSchema,
19
+ JsonSchemaMode,
20
+ )
21
+
22
+
23
class OmitJsonSchema(GenerateJsonSchema):  # type: ignore[misc]
    """
    Custom JSON schema generator that omits the schema generation for fields that are
    invalid. Excluded fields (``Field(exclude=True)``) are generally useful as
    properties of the model but are not meant to be serialized to JSON.
    """

    def field_is_present(self, field: CoreSchemaField) -> bool:
        # override ``field_is_present`` and omit excluded fields from the schema
        if field.get("serialization_exclude", False):
            return False
        # defer to pydantic's default presence logic for everything else
        return cast(bool, super().field_is_present(field))
35
+
36
+
37
class CamelModel(BaseModel):  # type: ignore[misc]
    """
    Camel-case pydantic model. This model is used to ensure serialization in a
    consistent manner, aliasing as camelCase serialization. This is useful for models
    that are used in JSON-RPC requests and responses, marking useful fields for the
    model, but that are not part of the JSON-RPC object, with ``Field(exclude=True)``.
    To serialize a model to the expected JSON-RPC format, or camelCase, use
    ``model_dump(by_alias=True)``.

    .. code-block:: python

        >>> from eth_utils.pydantic import CamelModel
        >>> from pydantic import Field

        >>> class SignedSetCodeAuthorization(CamelModel):
        ...     chain_id: int
        ...     address: bytes
        ...     nonce: int
        ...
        ...     # useful fields for the object but excluded from serialization
        ...     # (not part of the JSON-RPC object)
        ...     authorization_hash: bytes = Field(exclude=True)
        ...     signature: bytes = Field(exclude=True)

        >>> auth = SignedSetCodeAuthorization(
        ...     chain_id=1,
        ...     address=b"0x0000000000000000000000000000000000000000",
        ...     nonce=0,
        ...     authorization_hash=generated_hash,
        ...     signature=generated_signature,
        ... )
        >>> auth.model_dump(by_alias=True)
        {'chainId': 1, 'address': '0x000000000000000000000000000000000000', 'nonce': 0}
    """

    # Shared configuration inherited by every camel-case model.
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        # populate by snake_case (python) args
        populate_by_name=True,
        # serialize by camelCase (json-rpc) keys
        alias_generator=to_camel,
        # validate default values
        validate_default=True,
    )

    @classmethod
    def model_json_schema(  # type: ignore [override]
        cls,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        # default to ``OmitJsonSchema`` to prevent errors from excluded fields
        schema_generator: type[GenerateJsonSchema] = OmitJsonSchema,
        mode: JsonSchemaMode = "validation",
    ) -> dict[str, Any]:
        """
        Omits excluded fields from the JSON schema, preventing errors that would
        otherwise be raised by the default schema generator.
        """
        return cast(
            dict[str, Any],
            super().model_json_schema(
                by_alias=by_alias,
                ref_template=ref_template,
                schema_generator=schema_generator,
                mode=mode,
            ),
        )