web3-7.0.0b2-py3-none-any.whl → web3-7.7.0-py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (144)
  1. ens/__init__.py +13 -2
  2. ens/_normalization.py +4 -4
  3. ens/async_ens.py +27 -15
  4. ens/base_ens.py +3 -1
  5. ens/contract_data.py +2 -2
  6. ens/ens.py +10 -7
  7. ens/exceptions.py +16 -29
  8. ens/specs/nf.json +1 -1
  9. ens/specs/normalization_spec.json +1 -1
  10. ens/utils.py +24 -32
  11. web3/__init__.py +23 -12
  12. web3/_utils/abi.py +157 -263
  13. web3/_utils/async_transactions.py +34 -20
  14. web3/_utils/batching.py +217 -0
  15. web3/_utils/blocks.py +6 -2
  16. web3/_utils/caching/__init__.py +12 -0
  17. web3/_utils/caching/caching_utils.py +433 -0
  18. web3/_utils/caching/request_caching_validation.py +287 -0
  19. web3/_utils/compat/__init__.py +2 -3
  20. web3/_utils/contract_sources/compile_contracts.py +1 -1
  21. web3/_utils/contract_sources/contract_data/ambiguous_function_contract.py +42 -0
  22. web3/_utils/contract_sources/contract_data/arrays_contract.py +3 -3
  23. web3/_utils/contract_sources/contract_data/bytes_contracts.py +5 -5
  24. web3/_utils/contract_sources/contract_data/constructor_contracts.py +7 -7
  25. web3/_utils/contract_sources/contract_data/contract_caller_tester.py +3 -3
  26. web3/_utils/contract_sources/contract_data/emitter_contract.py +3 -3
  27. web3/_utils/contract_sources/contract_data/event_contracts.py +50 -5
  28. web3/_utils/contract_sources/contract_data/extended_resolver.py +3 -3
  29. web3/_utils/contract_sources/contract_data/fallback_function_contract.py +3 -3
  30. web3/_utils/contract_sources/contract_data/function_name_tester_contract.py +3 -3
  31. web3/_utils/contract_sources/contract_data/math_contract.py +3 -3
  32. web3/_utils/contract_sources/contract_data/offchain_lookup.py +3 -3
  33. web3/_utils/contract_sources/contract_data/offchain_resolver.py +3 -3
  34. web3/_utils/contract_sources/contract_data/panic_errors_contract.py +3 -3
  35. web3/_utils/contract_sources/contract_data/payable_tester.py +3 -3
  36. web3/_utils/contract_sources/contract_data/receive_function_contracts.py +5 -5
  37. web3/_utils/contract_sources/contract_data/reflector_contracts.py +3 -3
  38. web3/_utils/contract_sources/contract_data/revert_contract.py +3 -3
  39. web3/_utils/contract_sources/contract_data/simple_resolver.py +3 -3
  40. web3/_utils/contract_sources/contract_data/storage_contract.py +3 -3
  41. web3/_utils/contract_sources/contract_data/string_contract.py +3 -3
  42. web3/_utils/contract_sources/contract_data/tuple_contracts.py +5 -5
  43. web3/_utils/contracts.py +172 -220
  44. web3/_utils/datatypes.py +5 -1
  45. web3/_utils/decorators.py +6 -1
  46. web3/_utils/empty.py +1 -1
  47. web3/_utils/encoding.py +16 -12
  48. web3/_utils/error_formatters_utils.py +5 -3
  49. web3/_utils/events.py +78 -72
  50. web3/_utils/fee_utils.py +1 -3
  51. web3/_utils/filters.py +24 -22
  52. web3/_utils/formatters.py +2 -2
  53. web3/_utils/http.py +8 -2
  54. web3/_utils/http_session_manager.py +314 -0
  55. web3/_utils/math.py +14 -15
  56. web3/_utils/method_formatters.py +161 -34
  57. web3/_utils/module.py +2 -1
  58. web3/_utils/module_testing/__init__.py +3 -2
  59. web3/_utils/module_testing/eth_module.py +736 -583
  60. web3/_utils/module_testing/go_ethereum_debug_module.py +128 -0
  61. web3/_utils/module_testing/module_testing_utils.py +81 -24
  62. web3/_utils/module_testing/persistent_connection_provider.py +702 -220
  63. web3/_utils/module_testing/utils.py +114 -33
  64. web3/_utils/module_testing/web3_module.py +438 -17
  65. web3/_utils/normalizers.py +13 -11
  66. web3/_utils/rpc_abi.py +10 -22
  67. web3/_utils/threads.py +8 -7
  68. web3/_utils/transactions.py +32 -25
  69. web3/_utils/type_conversion.py +5 -1
  70. web3/_utils/validation.py +20 -17
  71. web3/beacon/__init__.py +5 -0
  72. web3/beacon/api_endpoints.py +3 -0
  73. web3/beacon/async_beacon.py +29 -6
  74. web3/beacon/beacon.py +24 -6
  75. web3/contract/__init__.py +7 -0
  76. web3/contract/async_contract.py +285 -82
  77. web3/contract/base_contract.py +556 -258
  78. web3/contract/contract.py +295 -84
  79. web3/contract/utils.py +251 -55
  80. web3/datastructures.py +49 -34
  81. web3/eth/__init__.py +7 -0
  82. web3/eth/async_eth.py +89 -69
  83. web3/eth/base_eth.py +7 -3
  84. web3/eth/eth.py +43 -66
  85. web3/exceptions.py +158 -83
  86. web3/gas_strategies/time_based.py +8 -6
  87. web3/geth.py +53 -184
  88. web3/main.py +77 -17
  89. web3/manager.py +362 -95
  90. web3/method.py +43 -15
  91. web3/middleware/__init__.py +17 -0
  92. web3/middleware/attrdict.py +12 -22
  93. web3/middleware/base.py +55 -2
  94. web3/middleware/filter.py +45 -23
  95. web3/middleware/formatting.py +6 -3
  96. web3/middleware/names.py +4 -1
  97. web3/middleware/signing.py +15 -6
  98. web3/middleware/stalecheck.py +2 -1
  99. web3/module.py +61 -25
  100. web3/providers/__init__.py +21 -0
  101. web3/providers/async_base.py +87 -32
  102. web3/providers/base.py +77 -32
  103. web3/providers/eth_tester/__init__.py +5 -0
  104. web3/providers/eth_tester/defaults.py +2 -55
  105. web3/providers/eth_tester/main.py +41 -15
  106. web3/providers/eth_tester/middleware.py +16 -17
  107. web3/providers/ipc.py +41 -17
  108. web3/providers/legacy_websocket.py +26 -1
  109. web3/providers/persistent/__init__.py +7 -0
  110. web3/providers/persistent/async_ipc.py +61 -121
  111. web3/providers/persistent/persistent.py +323 -16
  112. web3/providers/persistent/persistent_connection.py +54 -5
  113. web3/providers/persistent/request_processor.py +136 -56
  114. web3/providers/persistent/subscription_container.py +56 -0
  115. web3/providers/persistent/subscription_manager.py +233 -0
  116. web3/providers/persistent/websocket.py +29 -92
  117. web3/providers/rpc/__init__.py +5 -0
  118. web3/providers/rpc/async_rpc.py +73 -18
  119. web3/providers/rpc/rpc.py +73 -30
  120. web3/providers/rpc/utils.py +1 -13
  121. web3/scripts/install_pre_releases.py +33 -0
  122. web3/scripts/parse_pygeth_version.py +16 -0
  123. web3/testing.py +4 -4
  124. web3/tracing.py +9 -5
  125. web3/types.py +141 -74
  126. web3/utils/__init__.py +64 -5
  127. web3/utils/abi.py +790 -10
  128. web3/utils/address.py +8 -0
  129. web3/utils/async_exception_handling.py +20 -11
  130. web3/utils/caching.py +34 -4
  131. web3/utils/exception_handling.py +9 -12
  132. web3/utils/subscriptions.py +285 -0
  133. {web3-7.0.0b2.dist-info → web3-7.7.0.dist-info}/LICENSE +1 -1
  134. web3-7.7.0.dist-info/METADATA +130 -0
  135. web3-7.7.0.dist-info/RECORD +171 -0
  136. {web3-7.0.0b2.dist-info → web3-7.7.0.dist-info}/WHEEL +1 -1
  137. web3/_utils/caching.py +0 -155
  138. web3/_utils/contract_sources/contract_data/address_reflector.py +0 -29
  139. web3/_utils/module_testing/go_ethereum_personal_module.py +0 -300
  140. web3/_utils/request.py +0 -265
  141. web3-7.0.0b2.dist-info/METADATA +0 -106
  142. web3-7.0.0b2.dist-info/RECORD +0 -163
  143. /web3/_utils/{function_identifiers.py → abi_element_identifiers.py} +0 -0
  144. {web3-7.0.0b2.dist-info → web3-7.7.0.dist-info}/top_level.txt +0 -0
web3/providers/persistent/async_ipc.py

@@ -1,9 +1,6 @@
  import asyncio
  import errno
  import json
- from json import (
-     JSONDecodeError,
- )
  import logging
  from pathlib import (
      Path,
@@ -16,10 +13,6 @@ from typing import (
      Union,
  )

- from eth_utils import (
-     to_text,
- )
-
  from web3.types import (
      RPCEndpoint,
      RPCResponse,
@@ -28,11 +21,11 @@ from web3.types import (
  from . import (
      PersistentConnectionProvider,
  )
- from ..._utils.caching import (
-     async_handle_request_caching,
- )
  from ...exceptions import (
+     PersistentConnectionClosedOK,
      ProviderConnectionError,
+     ReadBufferLimitReached,
+     Web3TypeError,
  )
  from ..ipc import (
      get_default_ipc_path,
@@ -40,7 +33,7 @@ from ..ipc import (


  async def async_get_ipc_socket(
-     ipc_path: str,
+     ipc_path: str, read_buffer_limit: int
  ) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter]:
      if sys.platform == "win32":
          # On Windows named pipe is used. Simulate socket with it.
@@ -50,7 +43,7 @@ async def async_get_ipc_socket(

          return NamedPipe(ipc_path)
      else:
-         return await asyncio.open_unix_connection(ipc_path)
+         return await asyncio.open_unix_connection(ipc_path, limit=read_buffer_limit)


  class AsyncIPCProvider(PersistentConnectionProvider):
@@ -58,23 +51,24 @@ class AsyncIPCProvider(PersistentConnectionProvider):

      _reader: Optional[asyncio.StreamReader] = None
      _writer: Optional[asyncio.StreamWriter] = None
+     _decoder: json.JSONDecoder = json.JSONDecoder()

      def __init__(
          self,
          ipc_path: Optional[Union[str, Path]] = None,
-         max_connection_retries: int = 5,
+         read_buffer_limit: int = 20 * 1024 * 1024,  # 20 MB
          # `PersistentConnectionProvider` kwargs can be passed through
          **kwargs: Any,
      ) -> None:
+         # initialize the ipc_path before calling the super constructor
          if ipc_path is None:
              self.ipc_path = get_default_ipc_path()
          elif isinstance(ipc_path, str) or isinstance(ipc_path, Path):
              self.ipc_path = str(Path(ipc_path).expanduser().resolve())
          else:
-             raise TypeError("ipc_path must be of type string or pathlib.Path")
-
-         self._max_connection_retries = max_connection_retries
+             raise Web3TypeError("ipc_path must be of type string or pathlib.Path")
          super().__init__(**kwargs)
+         self.read_buffer_limit = read_buffer_limit

      def __str__(self) -> str:
          return f"<{self.__class__.__name__} {self.ipc_path}>"
@@ -84,81 +78,46 @@ class AsyncIPCProvider(PersistentConnectionProvider):
              return False

          try:
-             request_data = self.encode_rpc_request(
-                 RPCEndpoint("web3_clientVersions"), []
-             )
-             self._writer.write(request_data)
-             current_request_id = json.loads(request_data)["id"]
-             await self._get_response_for_request_id(current_request_id, timeout=2)
+             await self.make_request(RPCEndpoint("web3_clientVersion"), [])
              return True
-         except (OSError, BrokenPipeError, ProviderConnectionError) as e:
+         except (OSError, ProviderConnectionError) as e:
              if show_traceback:
                  raise ProviderConnectionError(
                      f"Problem connecting to provider with error: {type(e)}: {e}"
                  )
              return False

-     async def connect(self) -> None:
-         _connection_attempts = 0
-         _backoff_rate_change = 1.75
-         _backoff_time = 1.75
-
-         while _connection_attempts != self._max_connection_retries:
-             try:
-                 _connection_attempts += 1
-                 self._reader, self._writer = await async_get_ipc_socket(self.ipc_path)
-                 self._message_listener_task = asyncio.create_task(
-                     self._message_listener()
-                 )
-                 break
-             except OSError as e:
-                 if _connection_attempts == self._max_connection_retries:
-                     raise ProviderConnectionError(
-                         f"Could not connect to endpoint: {self.endpoint_uri}. "
-                         f"Retries exceeded max of {self._max_connection_retries}."
-                     ) from e
-                 self.logger.info(
-                     f"Could not connect to endpoint: {self.endpoint_uri}. Retrying in "
-                     f"{round(_backoff_time, 1)} seconds.",
-                     exc_info=True,
-                 )
-                 await asyncio.sleep(_backoff_time)
-                 _backoff_time *= _backoff_rate_change
-
-     async def disconnect(self) -> None:
-         if self._writer and not self._writer.is_closing():
-             self._writer.close()
-             await self._writer.wait_closed()
-             self._writer = None
-             self.logger.debug(
-                 f'Successfully disconnected from endpoint: "{self.endpoint_uri}'
-             )
-
-         try:
-             self._message_listener_task.cancel()
-             await self._message_listener_task
-             self._reader = None
-         except (asyncio.CancelledError, StopAsyncIteration):
-             pass
-
-         self._request_processor.clear_caches()
-
-     async def _reset_socket(self) -> None:
-         self._writer.close()
-         await self._writer.wait_closed()
-         self._reader, self._writer = await async_get_ipc_socket(self.ipc_path)
-
-     @async_handle_request_caching
-     async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
-         request_data = self.encode_rpc_request(method, params)
-
+     async def socket_send(self, request_data: bytes) -> None:
          if self._writer is None:
              raise ProviderConnectionError(
                  "Connection to ipc socket has not been initiated for the provider."
              )

+         return await asyncio.wait_for(
+             self._socket_send(request_data), timeout=self.request_timeout
+         )
+
+     async def socket_recv(self) -> RPCResponse:
          try:
-             self._writer.write(request_data)
+             data = await self._reader.readline()
+         except ValueError as e:
+             if all(kw in str(e) for kw in ("limit", "chunk")):
+                 raise ReadBufferLimitReached(
+                     f"Read buffer limit of `{self.read_buffer_limit}` bytes was "
+                     "reached. Consider increasing the ``read_buffer_limit`` on the "
+                     "AsyncIPCProvider."
+                 ) from e
+             raise
+
+         if not data:
+             raise PersistentConnectionClosedOK("Socket reader received end of stream.")
+         return self.decode_rpc_response(data)
+
+     # -- private methods -- #
+
+     async def _socket_send(self, request_data: bytes) -> None:
+         try:
+             self._writer.write(request_data + b"\n")
              await self._writer.drain()
          except OSError as e:
              # Broken pipe
@@ -168,48 +127,29 @@ class AsyncIPCProvider(PersistentConnectionProvider):
                  self._writer.write(request_data)
                  await self._writer.drain()

-         current_request_id = json.loads(request_data)["id"]
-         response = await self._get_response_for_request_id(current_request_id)
-
-         return response
+     async def _reset_socket(self) -> None:
+         self._writer.close()
+         await self._writer.wait_closed()
+         self._reader, self._writer = await async_get_ipc_socket(
+             self.ipc_path, self.read_buffer_limit
+         )

-     async def _message_listener(self) -> None:
-         self.logger.info(
-             "IPC socket listener background task started. Storing all messages in "
-             "appropriate request processor queues / caches to be processed."
+     async def _provider_specific_connect(self) -> None:
+         self._reader, self._writer = await async_get_ipc_socket(
+             self.ipc_path, self.read_buffer_limit
          )
-         raw_message = ""
-         decoder = json.JSONDecoder()
-
-         while True:
-             # the use of sleep(0) seems to be the most efficient way to yield control
-             # back to the event loop to share the loop with other tasks.
-             await asyncio.sleep(0)
-
-             try:
-                 raw_message += to_text(await self._reader.read(4096)).lstrip()
-
-                 while raw_message:
-                     try:
-                         response, pos = decoder.raw_decode(raw_message)
-                     except JSONDecodeError:
-                         break
-
-                     is_subscription = response.get("method") == "eth_subscription"
-                     await self._request_processor.cache_raw_response(
-                         response, subscription=is_subscription
-                     )
-                     raw_message = raw_message[pos:].lstrip()
-             except Exception as e:
-                 if not self.silence_listener_task_exceptions:
-                     loop = asyncio.get_event_loop()
-                     for task in asyncio.all_tasks(loop=loop):
-                         task.cancel()
-                     raise e
-
-                 self.logger.error(
-                     "Exception caught in listener, error logging and keeping listener "
-                     f"background task alive.\n error={e}"
-                 )
-                 # if only error logging, reset the ``raw_message`` buffer and continue
-                 raw_message = ""
+
+     async def _provider_specific_disconnect(self) -> None:
+         # this should remain idempotent
+         if self._writer and not self._writer.is_closing():
+             self._writer.close()
+             await self._writer.wait_closed()
+             self._writer = None
+         if self._reader:
+             self._reader = None
+
+     async def _provider_specific_socket_reader(self) -> RPCResponse:
+         return await self.socket_recv()
+
+     def _error_log_listener_task_exception(self, e: Exception) -> None:
+         super()._error_log_listener_task_exception(e)
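The keyword check in ``socket_recv()`` above matches the ``ValueError`` that ``asyncio.StreamReader.readline()`` raises when a single line outgrows the stream's ``limit`` (which is set from ``read_buffer_limit`` via ``asyncio.open_unix_connection``). A minimal, stdlib-only illustration of that failure mode, independent of any IPC socket:

    import asyncio

    async def main() -> None:
        reader = asyncio.StreamReader(limit=64)  # deliberately tiny read buffer
        reader.feed_data(b"x" * 256)             # a "response" larger than the limit, no newline yet
        try:
            await reader.readline()
        except ValueError as e:
            # message mentions "limit" and "chunk", which socket_recv() above
            # translates into ReadBufferLimitReached
            print(type(e).__name__, e)

    asyncio.run(main())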
web3/providers/persistent/persistent.py

@@ -1,17 +1,44 @@
  from abc import (
      ABC,
+     abstractmethod,
  )
  import asyncio
  import logging
+ import signal
  from typing import (
+     TYPE_CHECKING,
+     Any,
+     Callable,
+     Coroutine,
+     List,
      Optional,
+     Tuple,
+     Union,
+     cast,
  )

+ from websockets import (
+     ConnectionClosed,
+     WebSocketException,
+ )
+
+ from web3._utils.batching import (
+     BATCH_REQUEST_ID,
+     sort_batch_response_by_response_ids,
+ )
  from web3._utils.caching import (
      generate_cache_key,
  )
+ from web3._utils.caching.caching_utils import (
+     async_handle_recv_caching,
+     async_handle_send_caching,
+ )
  from web3.exceptions import (
+     PersistentConnectionClosedOK,
+     ProviderConnectionError,
+     TaskNotRunning,
      TimeExhausted,
+     Web3AttributeError,
  )
  from web3.providers.async_base import (
      AsyncJSONBaseProvider,
@@ -20,47 +47,327 @@ from web3.providers.persistent.request_processor import (
      RequestProcessor,
  )
  from web3.types import (
+     RPCEndpoint,
      RPCId,
+     RPCRequest,
      RPCResponse,
  )

+ if TYPE_CHECKING:
+     from web3 import AsyncWeb3  # noqa: F401
+     from web3.middleware.base import MiddlewareOnion  # noqa: F401
+
+
  DEFAULT_PERSISTENT_CONNECTION_TIMEOUT = 30.0


  class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
      logger = logging.getLogger("web3.providers.PersistentConnectionProvider")
      has_persistent_connection = True
-     endpoint_uri: Optional[str] = None

-     _request_processor: RequestProcessor
-     _message_listener_task: Optional["asyncio.Task[None]"] = None
-     _listen_event: asyncio.Event = asyncio.Event()
+     _send_func_cache: Tuple[int, Callable[..., Coroutine[Any, Any, RPCRequest]]] = (
+         None,
+         None,
+     )
+     _recv_func_cache: Tuple[int, Callable[..., Coroutine[Any, Any, RPCResponse]]] = (
+         None,
+         None,
+     )

      def __init__(
          self,
          request_timeout: float = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
          subscription_response_queue_size: int = 500,
          silence_listener_task_exceptions: bool = False,
+         max_connection_retries: int = 5,
+         **kwargs: Any,
      ) -> None:
-         super().__init__()
+         super().__init__(**kwargs)
          self._request_processor = RequestProcessor(
              self,
              subscription_response_queue_size=subscription_response_queue_size,
          )
+         self._message_listener_task: Optional["asyncio.Task[None]"] = None
+         self._batch_request_counter: Optional[int] = None
+         self._listen_event: asyncio.Event = asyncio.Event()
+         self._max_connection_retries = max_connection_retries
+
          self.request_timeout = request_timeout
          self.silence_listener_task_exceptions = silence_listener_task_exceptions

+     async def send_func(
+         self, async_w3: "AsyncWeb3", middleware_onion: "MiddlewareOnion"
+     ) -> Callable[..., Coroutine[Any, Any, RPCRequest]]:
+         """
+         Cache the middleware chain for `send`.
+         """
+         middleware = middleware_onion.as_tuple_of_middleware()
+         cache_key = hash(tuple(id(mw) for mw in middleware))
+
+         if cache_key != self._send_func_cache[0]:
+
+             async def send_function(method: RPCEndpoint, params: Any) -> RPCRequest:
+                 for mw in middleware:
+                     initialized = mw(async_w3)
+                     method, params = await initialized.async_request_processor(
+                         method, params
+                     )
+
+                 return await self.send_request(method, params)
+
+             self._send_func_cache = (cache_key, send_function)
+
+         return self._send_func_cache[1]
+
+     async def recv_func(
+         self, async_w3: "AsyncWeb3", middleware_onion: "MiddlewareOnion"
+     ) -> Any:
+         """
+         Cache and compose the middleware stack for `recv`.
+         """
+         middleware = middleware_onion.as_tuple_of_middleware()
+         cache_key = hash(tuple(id(mw) for mw in middleware))
+
+         if cache_key != self._recv_func_cache[0]:
+
+             async def recv_function(rpc_request: RPCRequest) -> RPCResponse:
+                 # first, retrieve the response
+                 response = await self.recv_for_request(rpc_request)
+                 method = rpc_request["method"]
+                 for mw in reversed(middleware):
+                     initialized = mw(async_w3)
+                     response = await initialized.async_response_processor(
+                         method, response
+                     )
+                 return response
+
+             self._recv_func_cache = (cache_key, recv_function)
+
+         return self._recv_func_cache[1]
+
+     def get_endpoint_uri_or_ipc_path(self) -> str:
+         if hasattr(self, "endpoint_uri"):
+             return str(self.endpoint_uri)
+         elif hasattr(self, "ipc_path"):
+             return str(self.ipc_path)
+         else:
+             raise Web3AttributeError(
+                 "`PersistentConnectionProvider` must have either `endpoint_uri` or "
+                 "`ipc_path` attribute."
+             )
+
      async def connect(self) -> None:
-         raise NotImplementedError("Must be implemented by subclasses")
+         _connection_attempts = 0
+         _backoff_rate_change = 1.75
+         _backoff_time = 1.75
+
+         while _connection_attempts != self._max_connection_retries:
+             try:
+                 _connection_attempts += 1
+                 self.logger.info(
+                     f"Connecting to: {self.get_endpoint_uri_or_ipc_path()}"
+                 )
+                 await self._provider_specific_connect()
+                 self._message_listener_task = asyncio.create_task(
+                     self._message_listener()
+                 )
+                 self._message_listener_task.add_done_callback(
+                     self._message_listener_callback
+                 )
+                 self.logger.info(
+                     f"Successfully connected to: {self.get_endpoint_uri_or_ipc_path()}"
+                 )
+                 break
+             except (WebSocketException, OSError) as e:
+                 if _connection_attempts == self._max_connection_retries:
+                     raise ProviderConnectionError(
+                         f"Could not connect to: {self.get_endpoint_uri_or_ipc_path()}. "
+                         f"Retries exceeded max of {self._max_connection_retries}."
+                     ) from e
+                 self.logger.info(
+                     f"Could not connect to: {self.get_endpoint_uri_or_ipc_path()}. "
+                     f"Retrying in {round(_backoff_time, 1)} seconds.",
+                     exc_info=True,
+                 )
+                 await asyncio.sleep(_backoff_time)
+                 _backoff_time *= _backoff_rate_change

      async def disconnect(self) -> None:
+         # this should remain idempotent
+         try:
+             if self._message_listener_task:
+                 self._message_listener_task.cancel()
+                 await self._message_listener_task
+         except (asyncio.CancelledError, StopAsyncIteration, ConnectionClosed):
+             pass
+         finally:
+             self._message_listener_task = None
+             self.logger.info("Message listener background task successfully shut down.")
+
+         await self._provider_specific_disconnect()
+         self._request_processor.clear_caches()
+         self.logger.info(
+             f"Successfully disconnected from: {self.get_endpoint_uri_or_ipc_path()}"
+         )
+
+     @async_handle_send_caching
+     async def send_request(self, method: RPCEndpoint, params: Any) -> RPCRequest:
+         request_dict = self.form_request(method, params)
+         await self.socket_send(self.encode_rpc_dict(request_dict))
+         return request_dict
+
+     @async_handle_recv_caching
+     async def recv_for_request(self, rpc_request: RPCRequest) -> RPCResponse:
+         return await self._get_response_for_request_id(rpc_request["id"])
+
+     async def make_request(
+         self,
+         method: RPCEndpoint,
+         params: Any,
+     ) -> RPCResponse:
+         rpc_request = await self.send_request(method, params)
+         return await self.recv_for_request(rpc_request)
+
+     async def make_batch_request(
+         self, requests: List[Tuple[RPCEndpoint, Any]]
+     ) -> List[RPCResponse]:
+         request_data = self.encode_batch_rpc_request(requests)
+         await self.socket_send(request_data)
+
+         response = cast(
+             List[RPCResponse], await self._get_response_for_request_id(BATCH_REQUEST_ID)
+         )
+         return response
+
+     # -- abstract methods -- #
+
+     @abstractmethod
+     async def socket_send(self, request_data: bytes) -> None:
+         """
+         Send an encoded RPC request to the provider over the persistent connection.
+         """
          raise NotImplementedError("Must be implemented by subclasses")

-     async def _message_listener(self) -> None:
+     @abstractmethod
+     async def socket_recv(self) -> RPCResponse:
+         """
+         Receive, decode, and return an RPC response from the provider over the
+         persistent connection.
+         """
+         raise NotImplementedError("Must be implemented by subclasses")
+
+     # -- private methods -- #
+
+     async def _provider_specific_connect(self) -> None:
          raise NotImplementedError("Must be implemented by subclasses")

+     async def _provider_specific_disconnect(self) -> None:
+         # this method should be idempotent
+         raise NotImplementedError("Must be implemented by subclasses")
+
+     async def _provider_specific_socket_reader(self) -> RPCResponse:
+         raise NotImplementedError("Must be implemented by subclasses")
+
+     def _set_signal_handlers(self) -> None:
+         def extended_handler(sig: int, frame: Any, existing_handler: Any) -> None:
+             loop = asyncio.get_event_loop()
+
+             # invoke the existing handler, if callable
+             if callable(existing_handler):
+                 existing_handler(sig, frame)
+             loop.create_task(self.disconnect())
+
+         existing_sigint_handler = signal.getsignal(signal.SIGINT)
+         existing_sigterm_handler = signal.getsignal(signal.SIGTERM)
+
+         # extend the existing signal handlers to include the disconnect method
+         signal.signal(
+             signal.SIGINT,
+             lambda sig, frame: extended_handler(sig, frame, existing_sigint_handler),
+         )
+         signal.signal(
+             signal.SIGTERM,
+             lambda sig, frame: extended_handler(sig, frame, existing_sigterm_handler),
+         )
+
+     def _message_listener_callback(
+         self, message_listener_task: "asyncio.Task[None]"
+     ) -> None:
+         # Puts a `TaskNotRunning` in appropriate queues to signal the end of the
+         # listener task to any listeners relying on the queues.
+         self._request_processor._subscription_response_queue.put_nowait(
+             TaskNotRunning(message_listener_task)
+         )
+         self._request_processor._handler_subscription_queue.put_nowait(
+             TaskNotRunning(message_listener_task)
+         )
+
+     async def _message_listener(self) -> None:
+         self.logger.info(
+             f"{self.__class__.__qualname__} listener background task started. Storing "
+             "all messages in appropriate request processor queues / caches to be "
+             "processed."
+         )
+         while True:
+             # the use of sleep(0) seems to be the most efficient way to yield control
+             # back to the event loop to share the loop with other tasks.
+             await asyncio.sleep(0)
+
+             try:
+                 response = await self._provider_specific_socket_reader()
+
+                 if isinstance(response, list):
+                     response = sort_batch_response_by_response_ids(response)
+
+                 subscription = (
+                     response.get("method") == "eth_subscription"
+                     if not isinstance(response, list)
+                     else False
+                 )
+                 await self._request_processor.cache_raw_response(
+                     response, subscription=subscription
+                 )
+             except PersistentConnectionClosedOK as e:
+                 self.logger.info(
+                     "Message listener background task has ended gracefully: "
+                     f"{e.user_message}"
+                 )
+                 # trigger a return to end the listener task and initiate the callback fn
+                 return
+             except Exception as e:
+                 if not self.silence_listener_task_exceptions:
+                     raise e
+                 else:
+                     self._error_log_listener_task_exception(e)
+
+     def _error_log_listener_task_exception(self, e: Exception) -> None:
+         """
+         When silencing listener task exceptions, this method is used to log the
+         exception and keep the listener task alive. Override this method to fine-tune
+         error logging behavior for the implementation class.
+         """
+         self.logger.error(
+             "Exception caught in listener, error logging and keeping "
+             "listener background task alive."
+             f"\n error={e.__class__.__name__}: {e}"
+         )
+
+     def _handle_listener_task_exceptions(self) -> None:
+         """
+         Should be called every time a `PersistentConnectionProvider` is polling for
+         messages in the main loop. If the message listener task has completed and an
+         exception was recorded, raise the exception in the main loop.
+         """
+         msg_listener_task = getattr(self, "_message_listener_task", None)
+         if (
+             msg_listener_task
+             and msg_listener_task.done()
+             and msg_listener_task.exception()
+         ):
+             raise msg_listener_task.exception()
+
      async def _get_response_for_request_id(
-         self, request_id: RPCId, timeout: Optional[float] = None
+         self, request_id: Union[RPCId, List[RPCId]], timeout: Optional[float] = None
      ) -> RPCResponse:
          if timeout is None:
              timeout = self.request_timeout
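As a quick sanity check of the retry loop in ``connect()`` above (now shared by all persistent providers rather than living in ``AsyncIPCProvider``): with the default ``max_connection_retries=5`` and a 1.75x backoff starting at 1.75 s, failed attempts wait roughly 1.8 s, 3.1 s, 5.4 s, and 9.4 s before the fifth failure raises ``ProviderConnectionError``. The schedule can be reproduced with:

    # reproduce the backoff constants used in connect() above
    backoff_time, rate, max_retries = 1.75, 1.75, 5
    for attempt in range(1, max_retries + 1):
        if attempt == max_retries:
            print(f"attempt {attempt}: give up and raise ProviderConnectionError")
        else:
            print(f"attempt {attempt}: retry in {round(backoff_time, 1)} s")
            backoff_time *= rate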
@@ -69,24 +376,24 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
              request_cache_key = generate_cache_key(request_id)

              while True:
-                 # sleep(0) here seems to be the most efficient way to yield control
-                 # back to the event loop while waiting for the response to be in the
-                 # queue.
-                 await asyncio.sleep(0)
-
                  if request_cache_key in self._request_processor._request_response_cache:
                      self.logger.debug(
                          f"Popping response for id {request_id} from cache."
                      )
-                     popped_response = self._request_processor.pop_raw_response(
+                     popped_response = await self._request_processor.pop_raw_response(
                          cache_key=request_cache_key,
                      )
                      return popped_response
+                 else:
+                     # check if an exception was recorded in the listener task and raise
+                     # it in the main loop if so
+                     self._handle_listener_task_exceptions()
+                     await asyncio.sleep(0)

          try:
              # Add the request timeout around the while loop that checks the request
-             # cache and tried to recv(). If the request is neither in the cache, nor
-             # received within the request_timeout, raise ``TimeExhausted``.
+             # cache. If the request is not in the cache within the request_timeout,
+             # raise ``TimeExhausted``.
              return await asyncio.wait_for(_match_response_id_to_request_id(), timeout)
          except asyncio.TimeoutError:
              raise TimeExhausted(
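One user-visible consequence of the response-matching loop above: if no response with a matching id lands in the request processor cache within ``request_timeout`` (default 30 s), the call raises ``TimeExhausted``. A hedged sketch of tuning and handling that, assuming the ``WebSocketProvider`` top-level export and async-with usage documented for web3.py v7; the endpoint is hypothetical:

    import asyncio

    from web3 import AsyncWeb3, WebSocketProvider
    from web3.exceptions import TimeExhausted

    async def main() -> None:
        # request_timeout is a PersistentConnectionProvider kwarg passed through
        provider = WebSocketProvider("ws://127.0.0.1:8546", request_timeout=5.0)
        async with AsyncWeb3(provider) as w3:
            try:
                print(await w3.eth.get_block("latest"))
            except TimeExhausted:
                print("no response within request_timeout; consider raising it")

    # asyncio.run(main())  # requires a node listening at the endpoint above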