faster-eth-abi 5.2.12__cp312-cp312-win_amd64.whl → 5.2.13__cp312-cp312-win_amd64.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

This version of faster-eth-abi has been flagged as potentially problematic.

Files changed (35)
  1. benchmarks/__init__.py +1 -0
  2. benchmarks/batch.py +9 -0
  3. benchmarks/data.py +313 -0
  4. benchmarks/test_abi_benchmarks.py +82 -0
  5. benchmarks/test_decoding_benchmarks.py +109 -0
  6. benchmarks/test_encoding_benchmarks.py +99 -0
  7. benchmarks/test_grammar_benchmarks.py +38 -0
  8. benchmarks/test_io_benchmarks.py +99 -0
  9. benchmarks/test_packed_benchmarks.py +41 -0
  10. benchmarks/test_registry_benchmarks.py +42 -0
  11. benchmarks/type_strings.py +26 -0
  12. faster_eth_abi/_codec.cp312-win_amd64.pyd +0 -0
  13. faster_eth_abi/_decoding.cp312-win_amd64.pyd +0 -0
  14. faster_eth_abi/_decoding.py +136 -5
  15. faster_eth_abi/_encoding.cp312-win_amd64.pyd +0 -0
  16. faster_eth_abi/_grammar.cp312-win_amd64.pyd +0 -0
  17. faster_eth_abi/abi.cp312-win_amd64.pyd +0 -0
  18. faster_eth_abi/constants.cp312-win_amd64.pyd +0 -0
  19. faster_eth_abi/decoding.py +66 -80
  20. faster_eth_abi/from_type_str.cp312-win_amd64.pyd +0 -0
  21. faster_eth_abi/packed.cp312-win_amd64.pyd +0 -0
  22. faster_eth_abi/tools/__init__.cp312-win_amd64.pyd +0 -0
  23. faster_eth_abi/tools/_strategies.cp312-win_amd64.pyd +0 -0
  24. faster_eth_abi/utils/__init__.cp312-win_amd64.pyd +0 -0
  25. faster_eth_abi/utils/numeric.cp312-win_amd64.pyd +0 -0
  26. faster_eth_abi/utils/padding.cp312-win_amd64.pyd +0 -0
  27. faster_eth_abi/utils/string.cp312-win_amd64.pyd +0 -0
  28. faster_eth_abi/utils/validation.cp312-win_amd64.pyd +0 -0
  29. {faster_eth_abi-5.2.12.dist-info → faster_eth_abi-5.2.13.dist-info}/METADATA +14 -2
  30. faster_eth_abi-5.2.13.dist-info/RECORD +57 -0
  31. {faster_eth_abi-5.2.12.dist-info → faster_eth_abi-5.2.13.dist-info}/top_level.txt +1 -0
  32. faster_eth_abi__mypyc.cp312-win_amd64.pyd +0 -0
  33. faster_eth_abi-5.2.12.dist-info/RECORD +0 -46
  34. {faster_eth_abi-5.2.12.dist-info → faster_eth_abi-5.2.13.dist-info}/WHEEL +0 -0
  35. {faster_eth_abi-5.2.12.dist-info → faster_eth_abi-5.2.13.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,99 @@
+"""
+Benchmarks for faster_eth_abi.decoding.ContextFramesBytesIO
+
+This file benchmarks the performance of ContextFramesBytesIO, a subclass of BytesIO
+that supports contextual frame management for nested ABI decoding.
+"""
+
+import pytest
+
+import eth_abi.decoding
+from pytest_codspeed import (
+    BenchmarkFixture,
+)
+
+from benchmarks.batch import (
+    batch,
+)
+import faster_eth_abi.decoding
+
+# Test parameters
+BUFFER_SIZES = [0, 32, 1024, 4096, 65536]
+FRAME_DEPTHS = [1, 5, 10, 50]
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-init")
+@pytest.mark.parametrize("size", BUFFER_SIZES)
+def test_contextframesbytesio_init(benchmark: BenchmarkFixture, size):
+    data = b"\x01" * size
+    benchmark(batch, 1000, eth_abi.decoding.ContextFramesBytesIO, data)
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-init")
+@pytest.mark.parametrize("size", BUFFER_SIZES)
+def test_faster_contextframesbytesio_init(benchmark: BenchmarkFixture, size):
+    data = b"\x01" * size
+    benchmark(batch, 1000, faster_eth_abi.decoding.ContextFramesBytesIO, data)
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-push-pop")
+@pytest.mark.parametrize("depth", FRAME_DEPTHS)
+def test_contextframesbytesio_push_pop(benchmark: BenchmarkFixture, depth):
+    data = b"\x01" * 1024
+    stream = eth_abi.decoding.ContextFramesBytesIO(data)
+
+    def push_pop():
+        for i in range(depth):
+            stream.push_frame(i * 10)
+        for _ in range(depth):
+            stream.pop_frame()
+
+    benchmark(batch, 100, push_pop)
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-push-pop")
+@pytest.mark.parametrize("depth", FRAME_DEPTHS)
+def test_faster_contextframesbytesio_push_pop(benchmark: BenchmarkFixture, depth):
+    data = b"\x01" * 1024
+    stream = faster_eth_abi.decoding.ContextFramesBytesIO(data)
+    ints = list(range(depth))

+    def push_pop():
+        for i in ints:
+            stream.push_frame(i * 10)
+        for _ in ints:
+            stream.pop_frame()
+
+    benchmark(batch, 100, push_pop)
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-seek-in-frame")
+@pytest.mark.parametrize("depth", FRAME_DEPTHS)
+def test_contextframesbytesio_seek_in_frame(benchmark: BenchmarkFixture, depth):
+    data = b"\x01" * 1024
+    stream = eth_abi.decoding.ContextFramesBytesIO(data)
+    # Set up the frame stack before timing
+    for i in range(depth):
+        stream.push_frame(i * 10)
+
+    def seek_in_frame_ops():
+        for i in range(depth):
+            stream.seek_in_frame(i)
+
+    benchmark(batch, 100, seek_in_frame_ops)
+
+
+@pytest.mark.benchmark(group="ContextFramesBytesIO-seek-in-frame")
+@pytest.mark.parametrize("depth", FRAME_DEPTHS)
+def test_faster_contextframesbytesio_seek_in_frame(benchmark: BenchmarkFixture, depth):
+    data = b"\x01" * 1024
+    stream = faster_eth_abi.decoding.ContextFramesBytesIO(data)
+    # Set up the frame stack before timing
+    for i in range(depth):
+        stream.push_frame(i * 10)
+
+    def seek_in_frame_ops():
+        for i in range(depth):
+            stream.seek_in_frame(i)
+
+    benchmark(batch, 100, seek_in_frame_ops)
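Note on the API exercised above: push_frame/pop_frame/seek_in_frame come from eth-abi's ContextFramesBytesIO, which keeps a stack of frame offsets so decoders can seek relative to the enclosing frame while decoding nested dynamic types. A minimal sketch of the observable semantics, based on eth-abi's public behavior (not the compiled faster-eth-abi internals):

    from eth_abi.decoding import ContextFramesBytesIO

    stream = ContextFramesBytesIO(b"\x01" * 1024)
    stream.push_frame(32)    # enter a frame rooted at absolute offset 32
    stream.seek_in_frame(8)  # seek relative to the frame: 32 + 8
    assert stream.tell() == 40
    stream.push_frame(10)    # nested frame offsets accumulate: 32 + 10
    stream.seek_in_frame(0)
    assert stream.tell() == 42
    stream.pop_frame()       # leave the inner frame, reseek to the outer origin
    assert stream.tell() == 32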
@@ -0,0 +1,41 @@
+import pytest
+
+import eth_abi.packed
+from pytest_codspeed import (
+    BenchmarkFixture,
+)
+
+from benchmarks.batch import (
+    batch,
+)
+from benchmarks.data import (
+    packed_cases,
+    packed_ids,
+)
+import faster_eth_abi.packed
+
+
+# Packed encoding
+@pytest.mark.benchmark(group="PackedEncoder")
+@pytest.mark.parametrize("abi_type,value", packed_cases, ids=packed_ids)
+def test_encode_packed(benchmark: BenchmarkFixture, abi_type, value):
+    benchmark(batch, 100, eth_abi.packed.encode_packed, [abi_type], [value])
+
+
+@pytest.mark.benchmark(group="PackedEncoder")
+@pytest.mark.parametrize("abi_type,value", packed_cases, ids=packed_ids)
+def test_faster_encode_packed(benchmark: BenchmarkFixture, abi_type, value):
+    benchmark(batch, 100, faster_eth_abi.packed.encode_packed, [abi_type], [value])
+
+
+# Packed is_encodable
+@pytest.mark.benchmark(group="PackedIsEncodable")
+@pytest.mark.parametrize("abi_type,value", packed_cases, ids=packed_ids)
+def test_is_encodable_packed(benchmark: BenchmarkFixture, abi_type, value):
+    benchmark(batch, 100, eth_abi.packed.is_encodable_packed, abi_type, value)
+
+
+@pytest.mark.benchmark(group="PackedIsEncodable")
+@pytest.mark.parametrize("abi_type,value", packed_cases, ids=packed_ids)
+def test_faster_is_encodable_packed(benchmark: BenchmarkFixture, abi_type, value):
+    benchmark(batch, 100, faster_eth_abi.packed.is_encodable_packed, abi_type, value)
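For context, packed encoding uses each type's natural byte width with no 32-byte padding, so the calls benchmarked above behave as follows (standard eth-abi behavior, values shown for illustration):

    from eth_abi.packed import encode_packed, is_encodable_packed

    assert encode_packed(["uint16"], [255]) == b"\x00\xff"  # 2 bytes, no padding
    assert encode_packed(["bool", "uint8"], [True, 1]) == b"\x01\x01"
    assert is_encodable_packed("uint8", 255)       # fits in 8 bits
    assert not is_encodable_packed("uint8", 256)   # out of range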
@@ -0,0 +1,42 @@
+import pytest
+
+from eth_abi.registry import (
+    registry,
+)
+from pytest_codspeed import (
+    BenchmarkFixture,
+)
+
+from benchmarks.batch import (
+    batch,
+)
+from benchmarks.type_strings import (
+    type_strings,
+)
+from faster_eth_abi.registry import (
+    registry as faster_registry,
+)
+
+
+@pytest.mark.benchmark(group="RegistryGetEncoder")
+@pytest.mark.parametrize("type_str", type_strings)
+def test_get_encoder(benchmark: BenchmarkFixture, type_str):
+    benchmark(batch, 1000, registry.get_encoder, type_str)
+
+
+@pytest.mark.benchmark(group="RegistryGetEncoder")
+@pytest.mark.parametrize("type_str", type_strings)
+def test_faster_get_encoder(benchmark: BenchmarkFixture, type_str):
+    benchmark(batch, 1000, faster_registry.get_encoder, type_str)
+
+
+@pytest.mark.benchmark(group="RegistryGetDecoder")
+@pytest.mark.parametrize("type_str", type_strings)
+def test_get_decoder(benchmark: BenchmarkFixture, type_str):
+    benchmark(batch, 1000, registry.get_decoder, type_str)
+
+
+@pytest.mark.benchmark(group="RegistryGetDecoder")
+@pytest.mark.parametrize("type_str", type_strings)
+def test_faster_get_decoder(benchmark: BenchmarkFixture, type_str):
+    benchmark(batch, 1000, faster_registry.get_decoder, type_str)
@@ -0,0 +1,26 @@
+# Shared list of all ABI type strings used in benchmarks
+
+type_strings = [
+    "uint256",
+    "int8",
+    "address",
+    "bytes32",
+    "string",
+    "bool",
+    "uint256[2]",
+    "string[]",
+    "(uint256,bool)",
+    "(address,uint8)",
+    "(string,bytes)",
+    "(uint256[2],string)",
+    "(uint8,(bool,string))",
+    "((uint8,uint8),uint8)",
+    "(uint8[2],(string,bool[2]))",
+    "(uint256[],(string[],bool))",
+    "((uint8[2],(string,bool)),bytes32)",
+    "(uint8[2][2],(string[2],bool[2]))",
+    "uint8[]",
+    "bytes",
+    "fixed128x18",
+    "ufixed128x18",
+]
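Each of these type strings resolves to an encoder/decoder pair through the registry benchmarked above. A small round-trip sketch using the standard eth-abi API (note that registry decoders take a ContextFramesBytesIO, not raw bytes):

    from eth_abi.decoding import ContextFramesBytesIO
    from eth_abi.registry import registry

    encoder = registry.get_encoder("(uint256,bool)")
    decoder = registry.get_decoder("(uint256,bool)")

    payload = encoder((42, True))  # two 32-byte head words, 64 bytes total
    assert decoder(ContextFramesBytesIO(payload)) == (42, True)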
Binary file
@@ -1,6 +1,8 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    Dict,
+    Final,
     Tuple,
 )
 
@@ -10,6 +12,7 @@ from faster_eth_utils import (
 
 from faster_eth_abi.exceptions import (
     InsufficientDataBytes,
+    InvalidPointer,
     NonEmptyPaddingBytes,
 )
 from faster_eth_abi.io import (
@@ -19,9 +22,11 @@ from faster_eth_abi.io import (
 
 if TYPE_CHECKING:
     from .decoding import (
+        BaseArrayDecoder,
         DynamicArrayDecoder,
         FixedByteSizeDecoder,
         HeadTailDecoder,
+        SignedIntegerDecoder,
         SizedArrayDecoder,
         TupleDecoder,
     )
@@ -66,8 +71,107 @@ def decode_head_tail(self: "HeadTailDecoder", stream: ContextFramesBytesIO) -> Any:
 
 # TupleDecoder
 def decode_tuple(self: "TupleDecoder", stream: ContextFramesBytesIO) -> Tuple[Any, ...]:
-    self.validate_pointers(stream)
-    return tuple(decoder(stream) for decoder in self.decoders)
+    # NOTE: the original implementation would do this, but it's
+    # kinda wasteful, so we rebuilt the logic within this function
+    # validate_pointers_tuple(self, stream)
+
+    current_location = stream.tell()
+    if self._no_head_tail:
+        # TODO: if all(isinstance(d, TupleDecoder) for d in self._decoders)
+        # return tuple(decode_tuple(stream) for _ in range(len(self.decoders)))
+        # and other types with compiled decode funcs
+        return tuple(decoder(stream) for decoder in self.decoders)
+
+    end_of_offsets = current_location + 32 * self.len_of_head
+    total_stream_length = len(stream.getbuffer())
+    items = []
+    for decoder, is_head_tail in zip(self.decoders, self._is_head_tail):
+        if is_head_tail:
+            # the next 32 bytes are a pointer that we should validate
+            # checkpoint the stream location so we can reset it after validation
+            step_location = stream.tell()
+
+            offset = decode_uint_256(stream)
+            indicated_idx = current_location + offset
+            if indicated_idx < end_of_offsets or indicated_idx >= total_stream_length:
+                # the pointer is indicating its data is located either within the
+                # offsets section of the stream or beyond the end of the stream,
+                # both of which are invalid
+                raise InvalidPointer(
+                    "Invalid pointer in tuple at location "
+                    f"{stream.tell() - 32} in payload"
+                )
+
+            # reset the stream so we can decode
+            stream.seek(step_location)
+
+        items.append(decoder(stream))
+
+    # return the stream to its original location
+    stream.seek(current_location)
+
+    return tuple(items)
+
+
+def validate_pointers_tuple(
+    self: "TupleDecoder",
+    stream: ContextFramesBytesIO,
+) -> None:
+    """
+    Verify that all pointers point to a valid location in the stream.
+    """
+    current_location = stream.tell()
+    if self._no_head_tail:
+        for decoder in self.decoders:
+            decoder(stream)
+    else:
+        end_of_offsets = current_location + 32 * self.len_of_head
+        total_stream_length = len(stream.getbuffer())
+        for decoder, is_head_tail in zip(self.decoders, self._is_head_tail):
+            if not is_head_tail:
+                # the next 32 bytes are not a pointer, so progress the stream
+                # per the decoder
+                decoder(stream)
+            else:
+                # the next 32 bytes are a pointer
+                offset = decode_uint_256(stream)
+                indicated_idx = current_location + offset
+                if (
+                    indicated_idx < end_of_offsets
+                    or indicated_idx >= total_stream_length
+                ):
+                    # the pointer is indicating its data is located either within the
+                    # offsets section of the stream or beyond the end of the stream,
+                    # both of which are invalid
+                    raise InvalidPointer(
+                        "Invalid pointer in tuple at location "
+                        f"{stream.tell() - 32} in payload"
+                    )
+    # return the stream to its original location for actual decoding
+    stream.seek(current_location)
+
+
+# BaseArrayDecoder
+def validate_pointers_array(
+    self: "BaseArrayDecoder", stream: ContextFramesBytesIO, array_size: int
+) -> None:
+    """
+    Verify that all pointers point to a valid location in the stream.
+    """
+    current_location = stream.tell()
+    end_of_offsets = current_location + 32 * array_size
+    total_stream_length = len(stream.getbuffer())
+    for _ in range(array_size):
+        offset = decode_uint_256(stream)
+        indicated_idx = current_location + offset
+        if indicated_idx < end_of_offsets or indicated_idx >= total_stream_length:
+            # the pointer is indicating its data is located either within the
+            # offsets section of the stream or beyond the end of the stream,
+            # both of which are invalid
+            raise InvalidPointer(
+                "Invalid pointer in array at location "
+                f"{stream.tell() - 32} in payload"
+            )
+    stream.seek(current_location)
 
 
 # SizedArrayDecoder
@@ -139,10 +243,37 @@ def validate_padding_bytes_fixed_byte_size(
     value: Any,
     padding_bytes: bytes,
 ) -> None:
-    value_byte_size = get_value_byte_size(self)
-    padding_size = self.data_byte_size - value_byte_size
+    if padding_bytes != get_expected_padding_bytes(self, b"\x00"):
+        raise NonEmptyPaddingBytes(f"Padding bytes were not empty: {padding_bytes!r}")
+
+
+_expected_padding_bytes_cache: Final[
+    Dict["FixedByteSizeDecoder", Dict[bytes, bytes]]
+] = {}
+
+
+def get_expected_padding_bytes(self: "FixedByteSizeDecoder", chunk: bytes) -> bytes:
+    instance_cache = _expected_padding_bytes_cache.setdefault(self, {})
+    expected_padding_bytes = instance_cache.get(chunk)
+    if expected_padding_bytes is None:
+        value_byte_size = get_value_byte_size(self)
+        padding_size = self.data_byte_size - value_byte_size
+        expected_padding_bytes = chunk * padding_size
+        instance_cache[chunk] = expected_padding_bytes
+    return expected_padding_bytes
+
+
+def validate_padding_bytes_signed_integer(
+    self: "SignedIntegerDecoder",
+    value: int,
+    padding_bytes: bytes,
+) -> None:
+    if value >= 0:
+        expected_padding_bytes = get_expected_padding_bytes(self, b"\x00")
+    else:
+        expected_padding_bytes = get_expected_padding_bytes(self, b"\xff")
 
-    if padding_bytes != b"\x00" * padding_size:
+    if padding_bytes != expected_padding_bytes:
         raise NonEmptyPaddingBytes(f"Padding bytes were not empty: {padding_bytes!r}")
 
 
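To make the bounds used by these pointer checks concrete: a dynamic tuple's head occupies len_of_head 32-byte words, so a decoded offset must resolve to at or after end_of_offsets and strictly before the end of the buffer, otherwise InvalidPointer is raised. A worked layout example (standard ABI encoding, shown via eth-abi for clarity):

    from eth_abi import encode

    payload = encode(["uint256", "string"], [7, "hi"])
    # head: word 0 holds 7, word 1 holds the tail pointer (0x40 = 64)
    assert payload[32:64] == (64).to_bytes(32, "big")
    assert 64 >= 2 * 32       # not inside the 2-word offsets section
    assert 64 < len(payload)  # not beyond the end of the stream (128 bytes here)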
Binary file
Binary file
@@ -1,5 +1,11 @@
 import abc
 import decimal
+from functools import (
+    cached_property,
+)
+from types import (
+    MethodType,
+)
 from typing import (
     Any,
     Callable,
@@ -7,6 +13,7 @@ from typing import (
     Optional,
     Tuple,
     Union,
+    final,
 )
 
 from faster_eth_utils import (
@@ -24,13 +31,14 @@ from faster_eth_abi._decoding import (
     read_fixed_byte_size_data_from_stream,
     split_data_and_padding_fixed_byte_size,
     validate_padding_bytes_fixed_byte_size,
+    validate_padding_bytes_signed_integer,
+    validate_pointers_array,
 )
 from faster_eth_abi.base import (
     BaseCoder,
 )
 from faster_eth_abi.exceptions import (
     InsufficientDataBytes,
-    InvalidPointer,
     NonEmptyPaddingBytes,
 )
 from faster_eth_abi.from_type_str import (
@@ -111,6 +119,10 @@ class TupleDecoder(BaseDecoder):
         self.len_of_head = sum(
             getattr(decoder, "array_size", 1) for decoder in decoders
         )
+        self._is_head_tail = tuple(
+            isinstance(decoder, HeadTailDecoder) for decoder in decoders
+        )
+        self._no_head_tail = not any(self._is_head_tail)
 
     def validate(self) -> None:
         super().validate()
@@ -118,35 +130,9 @@ class TupleDecoder(BaseDecoder):
         if self.decoders is None:
             raise ValueError("No `decoders` set")
 
+    @final
     def validate_pointers(self, stream: ContextFramesBytesIO) -> None:
-        """
-        Verify that all pointers point to a valid location in the stream.
-        """
-        current_location = stream.tell()
-        end_of_offsets = current_location + 32 * self.len_of_head
-        total_stream_length = len(stream.getbuffer())
-        for decoder in self.decoders:
-            if isinstance(decoder, HeadTailDecoder):
-                # the next 32 bytes are a pointer
-                offset = decode_uint_256(stream)
-                indicated_idx = current_location + offset
-                if (
-                    indicated_idx < end_of_offsets
-                    or indicated_idx >= total_stream_length
-                ):
-                    # the pointer is indicating its data is located either within the
-                    # offsets section of the stream or beyond the end of the stream,
-                    # both of which are invalid
-                    raise InvalidPointer(
-                        "Invalid pointer in tuple at location "
-                        f"{stream.tell() - 32} in payload"
-                    )
-            else:
-                # the next 32 bytes are not a pointer, so progress the stream per
-                # the decoder
-                decoder(stream)
-        # return the stream to its original location for actual decoding
-        stream.seek(current_location)
+        raise NotImplementedError("didnt call __init__")
 
     def decode(self, stream: ContextFramesBytesIO) -> Tuple[Any, ...]:
         return decode_tuple(self, stream)
@@ -171,10 +157,10 @@ class SingleDecoder(BaseDecoder):
         if self.decoder_fn is None:
             raise ValueError("No `decoder_fn` set")
 
-    def validate_padding_bytes(self, value, padding_bytes):
+    def validate_padding_bytes(self, value: Any, padding_bytes: bytes) -> None:
         raise NotImplementedError("Must be implemented by subclasses")
 
-    def decode(self, stream):
+    def decode(self, stream: ContextFramesBytesIO) -> Any:
         raw_data = self.read_data_from_stream(stream)
         data, padding_bytes = self.split_data_and_padding(raw_data)
         decoder_fn = self.decoder_fn
@@ -204,6 +190,15 @@ class BaseArrayDecoder(BaseDecoder):
         item_decoder = self.item_decoder
         if item_decoder.is_dynamic:
             self.item_decoder = HeadTailDecoder(tail_decoder=item_decoder)
+        else:
+
+            def noop(stream: ContextFramesBytesIO, array_size: int) -> None:
+                ...
+
+            self.validate_pointers = noop
+
+    def decode(self, stream: ContextFramesBytesIO) -> Tuple[Any, ...]:
+        raise NotImplementedError  # this is a type stub
 
     def validate(self) -> None:
         super().validate()
@@ -230,25 +225,7 @@
         """
        Verify that all pointers point to a valid location in the stream.
         """
-        if isinstance(self.item_decoder, HeadTailDecoder):
-            current_location = stream.tell()
-            end_of_offsets = current_location + 32 * array_size
-            total_stream_length = len(stream.getbuffer())
-            for _ in range(array_size):
-                offset = decode_uint_256(stream)
-                indicated_idx = current_location + offset
-                if (
-                    indicated_idx < end_of_offsets
-                    or indicated_idx >= total_stream_length
-                ):
-                    # the pointer is indicating its data is located either within the
-                    # offsets section of the stream or beyond the end of the stream,
-                    # both of which are invalid
-                    raise InvalidPointer(
-                        "Invalid pointer in array at location "
-                        f"{stream.tell() - 32} in payload"
-                    )
-            stream.seek(current_location)
+        validate_pointers_array(self, stream, array_size)
 
 
 class SizedArrayDecoder(BaseArrayDecoder):
@@ -281,6 +258,23 @@ class FixedByteSizeDecoder(SingleDecoder):
     data_byte_size: int = None
     is_big_endian: bool = None
 
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        self.read_data_from_stream = MethodType(
+            read_fixed_byte_size_data_from_stream, self
+        )
+        self.split_data_and_padding = MethodType(
+            split_data_and_padding_fixed_byte_size, self
+        )
+        self._get_value_byte_size = MethodType(get_value_byte_size, self)
+
+        # Only assign validate_padding_bytes if not overridden in subclass
+        if type(self).validate_padding_bytes is SingleDecoder.validate_padding_bytes:
+            self.validate_padding_bytes = MethodType(
+                validate_padding_bytes_fixed_byte_size, self
+            )
+
     def validate(self) -> None:
         super().validate()
@@ -304,17 +298,14 @@
             raise ValueError("Value byte size exceeds data size")
 
     def read_data_from_stream(self, stream: ContextFramesBytesIO) -> bytes:
-        return read_fixed_byte_size_data_from_stream(self, stream)
+        raise NotImplementedError("didnt call __init__")
 
     def split_data_and_padding(self, raw_data: bytes) -> Tuple[bytes, bytes]:
-        return split_data_and_padding_fixed_byte_size(self, raw_data)
-
-    def validate_padding_bytes(self, value: Any, padding_bytes: bytes) -> None:
-        validate_padding_bytes_fixed_byte_size(self, value, padding_bytes)
+        raise NotImplementedError("didnt call __init__")
 
+    # This is unused, but it is kept in to preserve the eth-abi api
     def _get_value_byte_size(self) -> int:
-        # This is unused, but it is kept in to preserve the eth-abi api
-        return get_value_byte_size(self)
+        raise NotImplementedError("didnt call __init__")
 
 
 class Fixed32ByteSizeDecoder(FixedByteSizeDecoder):
@@ -363,27 +354,22 @@ decode_uint_256 = UnsignedIntegerDecoder(value_bit_size=256)
 class SignedIntegerDecoder(Fixed32ByteSizeDecoder):
     is_big_endian = True
 
-    def decoder_fn(self, data):
-        value = big_endian_to_int(data)
-        value_bit_size = self.value_bit_size
-        if value >= 2 ** (value_bit_size - 1):
-            return value - 2**value_bit_size
-        else:
-            return value
+    @cached_property
+    def neg_threshold(self) -> int:
+        return int(2 ** (self.value_bit_size - 1))
 
-    def validate_padding_bytes(self, value: Any, padding_bytes: bytes) -> None:
-        value_byte_size = get_value_byte_size(self)
-        padding_size = self.data_byte_size - value_byte_size
+    @cached_property
+    def neg_offset(self) -> int:
+        return int(2**self.value_bit_size)
 
-        if value >= 0:
-            expected_padding_bytes = b"\x00" * padding_size
-        else:
-            expected_padding_bytes = b"\xff" * padding_size
+    def decoder_fn(self, data: bytes) -> int:
+        value = big_endian_to_int(data)
+        if value >= self.neg_threshold:
+            value -= self.neg_offset
+        return value
 
-        if padding_bytes != expected_padding_bytes:
-            raise NonEmptyPaddingBytes(
-                f"Padding bytes were not empty: {padding_bytes!r}"
-            )
+    def validate_padding_bytes(self, value: Any, padding_bytes: bytes) -> None:
+        return validate_padding_bytes_signed_integer(self, value, padding_bytes)
 
     @parse_type_str("int")
     def from_type_str(cls, abi_type, registry):
@@ -397,7 +383,7 @@ class BytesDecoder(Fixed32ByteSizeDecoder):
     is_big_endian = False
 
     @staticmethod
-    def decoder_fn(data):
+    def decoder_fn(data: bytes) -> bytes:
         return data
 
     @parse_type_str("bytes")
@@ -417,11 +403,11 @@ class BaseFixedDecoder(Fixed32ByteSizeDecoder):
             raise ValueError("must specify `frac_places`")
 
         if frac_places <= 0 or frac_places > 80:
-            raise ValueError("`frac_places` must be in range (0, 80]")
+            raise ValueError("`frac_places` must be in range (0, 80)")
 
 
 class UnsignedFixedDecoder(BaseFixedDecoder):
-    def decoder_fn(self, data):
+    def decoder_fn(self, data: bytes) -> decimal.Decimal:
         value = big_endian_to_int(data)
 
         with decimal.localcontext(abi_decimal_context):
@@ -437,7 +423,7 @@ class UnsignedFixedDecoder(BaseFixedDecoder):
 
 
 class SignedFixedDecoder(BaseFixedDecoder):
-    def decoder_fn(self, data):
+    def decoder_fn(self, data: bytes) -> decimal.Decimal:
         value = big_endian_to_int(data)
         value_bit_size = self.value_bit_size
         if value >= 2 ** (value_bit_size - 1):
@@ -478,7 +464,7 @@ class ByteStringDecoder(SingleDecoder):
     is_dynamic = True
 
     @staticmethod
-    def decoder_fn(data):
+    def decoder_fn(data: bytes) -> bytes:
         return data
 
     def read_data_from_stream(self, stream: ContextFramesBytesIO) -> bytes:
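The cached neg_threshold/neg_offset properties above are plain two's-complement arithmetic: a raw big-endian value v of an intN is negative once v >= 2**(N - 1), and the decoded result is then v - 2**N. Worked for int8 (arithmetic only, independent of the compiled code):

    value_bit_size = 8
    neg_threshold = 2 ** (value_bit_size - 1)  # 128
    neg_offset = 2 ** value_bit_size           # 256

    raw = 0xFF                     # big_endian_to_int(b"\xff")
    assert raw >= neg_threshold    # negative branch
    assert raw - neg_offset == -1

    raw = 0x7F
    assert raw < neg_threshold     # positive branch: stays 127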
Binary file