web3 7.0.0b5__py3-none-any.whl → 7.0.0b7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ens/__init__.py +13 -2
- web3/__init__.py +21 -5
- web3/_utils/batching.py +217 -0
- web3/_utils/caching.py +26 -2
- web3/_utils/compat/__init__.py +1 -0
- web3/_utils/contract_sources/contract_data/arrays_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/bytes_contracts.py +5 -5
- web3/_utils/contract_sources/contract_data/constructor_contracts.py +7 -7
- web3/_utils/contract_sources/contract_data/contract_caller_tester.py +3 -3
- web3/_utils/contract_sources/contract_data/emitter_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/event_contracts.py +5 -5
- web3/_utils/contract_sources/contract_data/extended_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/fallback_function_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/function_name_tester_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/math_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/offchain_lookup.py +3 -3
- web3/_utils/contract_sources/contract_data/offchain_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/panic_errors_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/payable_tester.py +3 -3
- web3/_utils/contract_sources/contract_data/receive_function_contracts.py +5 -5
- web3/_utils/contract_sources/contract_data/reflector_contracts.py +3 -3
- web3/_utils/contract_sources/contract_data/revert_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/simple_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/storage_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/string_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/tuple_contracts.py +5 -5
- web3/_utils/events.py +2 -2
- web3/_utils/http.py +3 -0
- web3/_utils/http_session_manager.py +280 -0
- web3/_utils/method_formatters.py +0 -2
- web3/_utils/module_testing/eth_module.py +92 -119
- web3/_utils/module_testing/module_testing_utils.py +27 -9
- web3/_utils/module_testing/persistent_connection_provider.py +1 -0
- web3/_utils/module_testing/web3_module.py +438 -17
- web3/_utils/rpc_abi.py +0 -3
- web3/beacon/__init__.py +5 -0
- web3/beacon/async_beacon.py +9 -5
- web3/beacon/beacon.py +7 -5
- web3/contract/__init__.py +7 -0
- web3/contract/base_contract.py +10 -1
- web3/contract/utils.py +112 -4
- web3/eth/__init__.py +7 -0
- web3/eth/async_eth.py +5 -37
- web3/eth/eth.py +7 -57
- web3/exceptions.py +20 -0
- web3/gas_strategies/time_based.py +2 -2
- web3/main.py +21 -9
- web3/manager.py +113 -8
- web3/method.py +29 -9
- web3/middleware/__init__.py +17 -0
- web3/middleware/base.py +43 -0
- web3/module.py +47 -7
- web3/providers/__init__.py +21 -0
- web3/providers/async_base.py +55 -23
- web3/providers/base.py +59 -26
- web3/providers/eth_tester/__init__.py +5 -0
- web3/providers/eth_tester/defaults.py +0 -6
- web3/providers/eth_tester/middleware.py +3 -8
- web3/providers/ipc.py +23 -8
- web3/providers/legacy_websocket.py +26 -1
- web3/providers/persistent/__init__.py +7 -0
- web3/providers/persistent/async_ipc.py +60 -76
- web3/providers/persistent/persistent.py +134 -10
- web3/providers/persistent/request_processor.py +98 -14
- web3/providers/persistent/websocket.py +43 -66
- web3/providers/rpc/__init__.py +5 -0
- web3/providers/rpc/async_rpc.py +34 -12
- web3/providers/rpc/rpc.py +34 -12
- web3/providers/rpc/utils.py +0 -3
- web3/tools/benchmark/main.py +7 -6
- web3/tools/benchmark/node.py +1 -1
- web3/types.py +7 -1
- web3/utils/__init__.py +14 -5
- web3/utils/async_exception_handling.py +19 -7
- web3/utils/exception_handling.py +7 -5
- {web3-7.0.0b5.dist-info → web3-7.0.0b7.dist-info}/LICENSE +1 -1
- {web3-7.0.0b5.dist-info → web3-7.0.0b7.dist-info}/METADATA +33 -20
- {web3-7.0.0b5.dist-info → web3-7.0.0b7.dist-info}/RECORD +80 -80
- {web3-7.0.0b5.dist-info → web3-7.0.0b7.dist-info}/WHEEL +1 -1
- web3/_utils/contract_sources/contract_data/address_reflector.py +0 -29
- web3/_utils/request.py +0 -265
- {web3-7.0.0b5.dist-info → web3-7.0.0b7.dist-info}/top_level.txt +0 -0
web3/providers/ipc.py
CHANGED
@@ -14,8 +14,11 @@ from types import (
 )
 from typing import (
     Any,
+    List,
+    Tuple,
     Type,
     Union,
+    cast,
 )
 
 from web3._utils.threads import (
@@ -26,6 +29,9 @@ from web3.types import (
     RPCResponse,
 )
 
+from .._utils.batching import (
+    sort_batch_response_by_response_ids,
+)
 from ..exceptions import (
     Web3TypeError,
     Web3ValueError,
@@ -135,7 +141,6 @@ class IPCProvider(JSONBaseProvider):
         self,
         ipc_path: Union[str, Path] = None,
         timeout: int = 30,
-        *args: Any,
         **kwargs: Any,
     ) -> None:
         if ipc_path is None:
@@ -148,17 +153,12 @@ class IPCProvider(JSONBaseProvider):
         self.timeout = timeout
         self._lock = threading.Lock()
         self._socket = PersistantSocket(self.ipc_path)
-        super().__init__()
+        super().__init__(**kwargs)
 
     def __str__(self) -> str:
         return f"<{self.__class__.__name__} {self.ipc_path}>"
 
-    def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
-        self.logger.debug(
-            f"Making request IPC. Path: {self.ipc_path}, Method: {method}"
-        )
-        request = self.encode_rpc_request(method, params)
-
+    def _make_request(self, request: bytes) -> RPCResponse:
         with self._lock, self._socket as sock:
             try:
                 sock.sendall(request)
@@ -189,6 +189,21 @@ class IPCProvider(JSONBaseProvider):
                     timeout.sleep(0)
                     continue
 
+    def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
+        self.logger.debug(
+            f"Making request IPC. Path: {self.ipc_path}, Method: {method}"
+        )
+        request = self.encode_rpc_request(method, params)
+        return self._make_request(request)
+
+    def make_batch_request(
+        self, requests: List[Tuple[RPCEndpoint, Any]]
+    ) -> List[RPCResponse]:
+        self.logger.debug(f"Making batch request IPC. Path: {self.ipc_path}")
+        request_data = self.encode_batch_rpc_request(requests)
+        response = cast(List[RPCResponse], self._make_request(request_data))
+        return sort_batch_response_by_response_ids(response)
+
 
 # A valid JSON RPC response can only end in } or ] http://www.jsonrpc.org/specification
 def has_valid_json_rpc_ending(raw_response: bytes) -> bool:
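
Both code paths above now funnel through the shared `_make_request(request: bytes)` socket helper: `make_request` encodes a single call, while the new `make_batch_request` encodes a list of `(method, params)` pairs with `encode_batch_rpc_request` and then re-orders the decoded responses, since a JSON-RPC server may answer a batch in any order. The re-ordering helper lives in the new `web3/_utils/batching.py` module; the sketch below only illustrates the kind of id-based sort such a helper is assumed to perform and is not the library's implementation:

    # Minimal sketch, assuming the helper simply re-aligns batch responses by their
    # JSON-RPC ids. Names here are illustrative, not web3.py internals.
    from typing import Any, Dict, List

    RPCResponse = Dict[str, Any]  # simplified stand-in for web3.types.RPCResponse

    def sort_by_response_id(responses: List[RPCResponse]) -> List[RPCResponse]:
        # Servers may return batch members out of order; sort them back by id.
        return sorted(responses, key=lambda response: response["id"])

    batch = [
        {"id": 2, "result": "0x2"},
        {"id": 0, "result": "0x0"},
        {"id": 1, "result": "0x1"},
    ]
    assert [r["id"] for r in sort_by_response_id(batch)] == [0, 1, 2]
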
web3/providers/legacy_websocket.py
CHANGED

@@ -10,9 +10,12 @@ from types import (
 )
 from typing import (
     Any,
+    List,
     Optional,
+    Tuple,
     Type,
     Union,
+    cast,
 )
 
 from eth_typing import (
@@ -25,6 +28,12 @@ from websockets.legacy.client import (
     WebSocketClientProtocol,
 )
 
+from web3._utils.batching import (
+    sort_batch_response_by_response_ids,
+)
+from web3._utils.caching import (
+    handle_request_caching,
+)
 from web3.exceptions import (
     Web3ValidationError,
 )
@@ -91,6 +100,7 @@ class LegacyWebSocketProvider(JSONBaseProvider):
         endpoint_uri: Optional[Union[URI, str]] = None,
         websocket_kwargs: Optional[Any] = None,
         websocket_timeout: int = DEFAULT_WEBSOCKET_TIMEOUT,
+        **kwargs: Any,
     ) -> None:
         self.endpoint_uri = URI(endpoint_uri)
         self.websocket_timeout = websocket_timeout
@@ -110,7 +120,7 @@ class LegacyWebSocketProvider(JSONBaseProvider):
                 f"in websocket_kwargs, found: {found_restricted_keys}"
             )
         self.conn = PersistentWebSocket(self.endpoint_uri, websocket_kwargs)
-        super().__init__()
+        super().__init__(**kwargs)
 
     def __str__(self) -> str:
         return f"WS connection {self.endpoint_uri}"
@@ -124,6 +134,7 @@ class LegacyWebSocketProvider(JSONBaseProvider):
             await asyncio.wait_for(conn.recv(), timeout=self.websocket_timeout)
         )
 
+    @handle_request_caching
    def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
         self.logger.debug(
             f"Making request WebSocket. URI: {self.endpoint_uri}, " f"Method: {method}"
@@ -133,3 +144,17 @@ class LegacyWebSocketProvider(JSONBaseProvider):
             self.coro_make_request(request_data), LegacyWebSocketProvider._loop
         )
         return future.result()
+
+    def make_batch_request(
+        self, requests: List[Tuple[RPCEndpoint, Any]]
+    ) -> List[RPCResponse]:
+        self.logger.debug(
+            f"Making batch request WebSocket. URI: {self.endpoint_uri}, "
+            f"Methods: {requests}"
+        )
+        request_data = self.encode_batch_rpc_request(requests)
+        future = asyncio.run_coroutine_threadsafe(
+            self.coro_make_request(request_data), LegacyWebSocketProvider._loop
+        )
+        response = cast(List[RPCResponse], future.result())
+        return sort_batch_response_by_response_ids(response)
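
`LegacyWebSocketProvider.make_batch_request` reuses the same machinery as `make_request`: the encoded payload is handed to `coro_make_request` and scheduled onto the provider's class-level event loop (`LegacyWebSocketProvider._loop`) via `asyncio.run_coroutine_threadsafe`, and the synchronous caller blocks on `future.result()`. The self-contained sketch below shows that thread-safe pattern in isolation; the background-loop bootstrap and the `fake_rpc_call` coroutine are placeholders, not web3.py code:

    # Sketch of driving a coroutine from synchronous code via a background event loop.
    import asyncio
    import threading

    def _start_background_loop() -> asyncio.AbstractEventLoop:
        # A daemon thread owns the loop so synchronous callers never block it.
        loop = asyncio.new_event_loop()
        threading.Thread(target=loop.run_forever, daemon=True).start()
        return loop

    _LOOP = _start_background_loop()

    async def fake_rpc_call(payload: str) -> str:
        await asyncio.sleep(0.01)  # stand-in for websocket send/recv
        return f"response for {payload}"

    def make_request_sync(payload: str) -> str:
        # Schedule the coroutine on the background loop and wait for the result.
        future = asyncio.run_coroutine_threadsafe(fake_rpc_call(payload), _LOOP)
        return future.result()

    print(make_request_sync("eth_blockNumber"))
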
web3/providers/persistent/async_ipc.py
CHANGED

@@ -11,9 +11,11 @@ from pathlib import (
 import sys
 from typing import (
     Any,
+    List,
     Optional,
     Tuple,
     Union,
+    cast,
 )
 
 from eth_utils import (
@@ -28,6 +30,10 @@ from web3.types import (
 from . import (
     PersistentConnectionProvider,
 )
+from ..._utils.batching import (
+    BATCH_REQUEST_ID,
+    sort_batch_response_by_response_ids,
+)
 from ..._utils.caching import (
     async_handle_request_caching,
 )
@@ -59,11 +65,12 @@ class AsyncIPCProvider(PersistentConnectionProvider):
 
     _reader: Optional[asyncio.StreamReader] = None
     _writer: Optional[asyncio.StreamWriter] = None
+    _decoder: json.JSONDecoder = json.JSONDecoder()
+    _raw_message: str = ""
 
     def __init__(
         self,
         ipc_path: Optional[Union[str, Path]] = None,
-        max_connection_retries: int = 5,
         # `PersistentConnectionProvider` kwargs can be passed through
         **kwargs: Any,
     ) -> None:
@@ -74,7 +81,6 @@ class AsyncIPCProvider(PersistentConnectionProvider):
         else:
             raise Web3TypeError("ipc_path must be of type string or pathlib.Path")
 
-        self._max_connection_retries = max_connection_retries
         super().__init__(**kwargs)
 
     def __str__(self) -> str:
@@ -99,48 +105,16 @@ class AsyncIPCProvider(PersistentConnectionProvider):
             )
             return False
 
-    async def connect(self) -> None:
-        _connection_attempts = 0
-        _backoff_rate_change = 1.75
-        _backoff_time = 1.75
-
-        while _connection_attempts != self._max_connection_retries:
-            try:
-                _connection_attempts += 1
-                self._reader, self._writer = await async_get_ipc_socket(self.ipc_path)
-                self._message_listener_task = asyncio.create_task(
-                    self._message_listener()
-                )
-                break
-            except OSError as e:
-                if _connection_attempts == self._max_connection_retries:
-                    raise ProviderConnectionError(
-                        f"Could not connect to: {self.ipc_path}. "
-                        f"Retries exceeded max of {self._max_connection_retries}."
-                    ) from e
-                self.logger.info(
-                    f"Could not connect to: {self.ipc_path}. Retrying in "
-                    f"{round(_backoff_time, 1)} seconds.",
-                    exc_info=True,
-                )
-                await asyncio.sleep(_backoff_time)
-                _backoff_time *= _backoff_rate_change
+    async def _provider_specific_connect(self) -> None:
+        self._reader, self._writer = await async_get_ipc_socket(self.ipc_path)
 
-    async def disconnect(self) -> None:
+    async def _provider_specific_disconnect(self) -> None:
         if self._writer and not self._writer.is_closing():
             self._writer.close()
             await self._writer.wait_closed()
             self._writer = None
-
-
-        try:
-            self._message_listener_task.cancel()
-            await self._message_listener_task
+        if self._reader:
             self._reader = None
-        except (asyncio.CancelledError, StopAsyncIteration):
-            pass
-
-        self._request_processor.clear_caches()
 
     async def _reset_socket(self) -> None:
         self._writer.close()
@@ -149,13 +123,12 @@ class AsyncIPCProvider(PersistentConnectionProvider):
 
     @async_handle_request_caching
     async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
-        request_data = self.encode_rpc_request(method, params)
-
         if self._writer is None:
             raise ProviderConnectionError(
                 "Connection to ipc socket has not been initiated for the provider."
             )
 
+        request_data = self.encode_rpc_request(method, params)
         try:
             self._writer.write(request_data)
             await self._writer.drain()
@@ -172,43 +145,54 @@ class AsyncIPCProvider(PersistentConnectionProvider):
 
         return response
 
-    async def _message_listener(self) -> None:
-        self.logger.info(
-            "IPC socket listener background task started. Storing all messages in "
-            "appropriate request processor queues / caches to be processed."
-        )
-        raw_message = ""
-        decoder = json.JSONDecoder()
-
-        while True:
-            # the use of sleep(0) seems to be the most efficient way to yield control
-            # back to the event loop to share the loop with other tasks.
-            await asyncio.sleep(0)
-
-            try:
-                raw_message += to_text(await self._reader.read(4096)).lstrip()
-
-                while raw_message:
-                    try:
-                        response, pos = decoder.raw_decode(raw_message)
-                    except JSONDecodeError:
-                        break
-
-                    is_subscription = response.get("method") == "eth_subscription"
-                    await self._request_processor.cache_raw_response(
-                        response, subscription=is_subscription
-                    )
-                    raw_message = raw_message[pos:].lstrip()
-            except Exception as e:
-                if not self.silence_listener_task_exceptions:
-                    raise e
-
-                self.logger.error(
-                    "Exception caught in listener, error logging and keeping listener "
-                    f"background task alive.\n error={e}"
-                )
-                # if only error logging, reset the ``raw_message`` buffer and continue
-                raw_message = ""
+    async def make_batch_request(
+        self, requests: List[Tuple[RPCEndpoint, Any]]
+    ) -> List[RPCResponse]:
+        if self._writer is None:
+            raise ProviderConnectionError(
+                "Connection to ipc socket has not been initiated for the provider."
+            )
+
+        request_data = self.encode_batch_rpc_request(requests)
+        try:
+            self._writer.write(request_data)
+            await self._writer.drain()
+        except OSError as e:
+            # Broken pipe
+            if e.errno == errno.EPIPE:
+                # one extra attempt, then give up
+                await self._reset_socket()
+                self._writer.write(request_data)
+                await self._writer.drain()
+
+        response = cast(
+            List[RPCResponse], await self._get_response_for_request_id(BATCH_REQUEST_ID)
+        )
+        return response
+
+    async def _provider_specific_message_listener(self) -> None:
+        self._raw_message += to_text(await self._reader.read(4096)).lstrip()
+
+        while self._raw_message:
+            try:
+                response, pos = self._decoder.raw_decode(self._raw_message)
+            except JSONDecodeError:
+                break
+
+            if isinstance(response, list):
+                response = sort_batch_response_by_response_ids(response)
+
+            is_subscription = (
+                response.get("method") == "eth_subscription"
+                if not isinstance(response, list)
+                else False
+            )
+            await self._request_processor.cache_raw_response(
+                response, subscription=is_subscription
+            )
+            self._raw_message = self._raw_message[pos:].lstrip()
+
+    def _error_log_listener_task_exception(self, e: Exception) -> None:
+        super()._error_log_listener_task_exception(e)
+        # reset the raw message buffer on exception when error logging
+        self._raw_message = ""
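
The rewritten listener drops the old per-call `raw_message`/`decoder` locals in favor of the class-level `_raw_message` buffer and `_decoder`, and `_provider_specific_message_listener` peels complete JSON documents off the front of the buffer with `json.JSONDecoder.raw_decode`, leaving any partial trailing document for the next socket read; batch responses (JSON arrays) are additionally sorted by response id before being cached. A standalone sketch of that buffered-decode technique, using illustrative names rather than the provider's attributes:

    # Sketch: peel complete JSON documents off a growing string buffer.
    import json

    decoder = json.JSONDecoder()
    buffer = ""

    def feed(chunk: str) -> list:
        """Append a chunk to the buffer and return every complete JSON value found."""
        global buffer
        buffer += chunk
        messages = []
        while buffer:
            try:
                obj, pos = decoder.raw_decode(buffer)
            except json.JSONDecodeError:
                break  # incomplete document; wait for the next chunk
            messages.append(obj)
            buffer = buffer[pos:].lstrip()
        return messages

    assert feed('{"id": 1, "result": "0x1"}{"id"') == [{"id": 1, "result": "0x1"}]
    assert feed(': 2, "result": "0x2"}') == [{"id": 2, "result": "0x2"}]
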
web3/providers/persistent/persistent.py
CHANGED

@@ -4,13 +4,23 @@ from abc import (
 import asyncio
 import logging
 from typing import (
+    Any,
+    List,
     Optional,
+    Union,
+)
+
+from websockets import (
+    ConnectionClosed,
+    WebSocketException,
 )
 
 from web3._utils.caching import (
     generate_cache_key,
 )
 from web3.exceptions import (
+    ProviderConnectionError,
+    TaskNotRunning,
     TimeExhausted,
 )
 from web3.providers.async_base import (
@@ -35,19 +45,24 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
     _message_listener_task: Optional["asyncio.Task[None]"] = None
     _listen_event: asyncio.Event = asyncio.Event()
 
+    _batch_request_counter: Optional[int] = None
+
     def __init__(
         self,
         request_timeout: float = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
         subscription_response_queue_size: int = 500,
         silence_listener_task_exceptions: bool = False,
+        max_connection_retries: int = 5,
+        **kwargs: Any,
     ) -> None:
-        super().__init__()
+        super().__init__(**kwargs)
         self._request_processor = RequestProcessor(
             self,
             subscription_response_queue_size=subscription_response_queue_size,
         )
         self.request_timeout = request_timeout
         self.silence_listener_task_exceptions = silence_listener_task_exceptions
+        self._max_connection_retries = max_connection_retries
 
     def get_endpoint_uri_or_ipc_path(self) -> str:
         if hasattr(self, "endpoint_uri"):
@@ -61,16 +76,124 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
         )
 
     async def connect(self) -> None:
-        raise NotImplementedError("Must be implemented by subclasses")
+        _connection_attempts = 0
+        _backoff_rate_change = 1.75
+        _backoff_time = 1.75
+
+        while _connection_attempts != self._max_connection_retries:
+            try:
+                _connection_attempts += 1
+                self.logger.info(
+                    f"Connecting to: {self.get_endpoint_uri_or_ipc_path()}"
+                )
+                await self._provider_specific_connect()
+                self._message_listener_task = asyncio.create_task(
+                    self._message_listener()
+                )
+                self._message_listener_task.add_done_callback(
+                    self._message_listener_callback
+                )
+                self.logger.info(
+                    f"Successfully connected to: {self.get_endpoint_uri_or_ipc_path()}"
+                )
+                break
+            except (WebSocketException, OSError) as e:
+                if _connection_attempts == self._max_connection_retries:
+                    raise ProviderConnectionError(
+                        f"Could not connect to: {self.get_endpoint_uri_or_ipc_path()}. "
+                        f"Retries exceeded max of {self._max_connection_retries}."
+                    ) from e
+                self.logger.info(
+                    f"Could not connect to: {self.get_endpoint_uri_or_ipc_path()}. "
+                    f"Retrying in {round(_backoff_time, 1)} seconds.",
+                    exc_info=True,
+                )
+                await asyncio.sleep(_backoff_time)
+                _backoff_time *= _backoff_rate_change
 
     async def disconnect(self) -> None:
+        try:
+            if self._message_listener_task:
+                self._message_listener_task.cancel()
+                await self._message_listener_task
+        except (asyncio.CancelledError, StopAsyncIteration, ConnectionClosed):
+            pass
+        finally:
+            self._message_listener_task = None
+        self.logger.info("Message listener background task successfully shut down.")
+
+        await self._provider_specific_disconnect()
+        self._request_processor.clear_caches()
+        self.logger.info(
+            f"Successfully disconnected from: {self.get_endpoint_uri_or_ipc_path()}"
+        )
+
+    # -- private methods -- #
+
+    async def _provider_specific_connect(self) -> None:
         raise NotImplementedError("Must be implemented by subclasses")
 
-    async def _message_listener(self) -> None:
+    async def _provider_specific_disconnect(self) -> None:
         raise NotImplementedError("Must be implemented by subclasses")
 
+    async def _provider_specific_message_listener(self) -> None:
+        raise NotImplementedError("Must be implemented by subclasses")
+
+    def _message_listener_callback(
+        self, message_listener_task: "asyncio.Task[None]"
+    ) -> None:
+        # Puts a `TaskNotRunning` in the queue to signal the end of the listener task
+        # to any running subscription streams that are awaiting a response.
+        self._request_processor._subscription_response_queue.put_nowait(
+            TaskNotRunning(message_listener_task)
+        )
+
+    async def _message_listener(self) -> None:
+        self.logger.info(
+            f"{self.__class__.__qualname__} listener background task started. Storing "
+            "all messages in appropriate request processor queues / caches to be "
+            "processed."
+        )
+        while True:
+            # the use of sleep(0) seems to be the most efficient way to yield control
+            # back to the event loop to share the loop with other tasks.
+            await asyncio.sleep(0)
+            try:
+                await self._provider_specific_message_listener()
+            except Exception as e:
+                if not self.silence_listener_task_exceptions:
+                    raise e
+                else:
+                    self._error_log_listener_task_exception(e)
+
+    def _error_log_listener_task_exception(self, e: Exception) -> None:
+        """
+        When silencing listener task exceptions, this method is used to log the
+        exception and keep the listener task alive. Override this method to fine-tune
+        error logging behavior for the implementation class.
+        """
+        self.logger.error(
+            "Exception caught in listener, error logging and keeping "
+            "listener background task alive."
+            f"\n error={e.__class__.__name__}: {e}"
+        )
+
+    def _handle_listener_task_exceptions(self) -> None:
+        """
+        Should be called every time a `PersistentConnectionProvider` is polling for
+        messages in the main loop. If the message listener task has completed and an
+        exception was recorded, raise the exception in the main loop.
+        """
+        msg_listener_task = getattr(self, "_message_listener_task", None)
+        if (
+            msg_listener_task
+            and msg_listener_task.done()
+            and msg_listener_task.exception()
+        ):
+            raise msg_listener_task.exception()
+
     async def _get_response_for_request_id(
-        self, request_id: RPCId, timeout: Optional[float] = None
+        self, request_id: Union[RPCId, List[RPCId]], timeout: Optional[float] = None
     ) -> RPCResponse:
         if timeout is None:
             timeout = self.request_timeout
@@ -79,10 +202,9 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
             request_cache_key = generate_cache_key(request_id)
 
             while True:
-                #
-                #
-
-                await asyncio.sleep(0)
+                # check if an exception was recorded in the listener task and raise it
+                # in the main loop if so
+                self._handle_listener_task_exceptions()
 
                 if request_cache_key in self._request_processor._request_response_cache:
                     self.logger.debug(
@@ -92,11 +214,13 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
                         cache_key=request_cache_key,
                     )
                     return popped_response
+                else:
+                    await asyncio.sleep(0)
 
         try:
             # Add the request timeout around the while loop that checks the request
-            # cache
-            #
+            # cache. If the request is not in the cache within the request_timeout,
+            # raise ``TimeExhausted``.
             return await asyncio.wait_for(_match_response_id_to_request_id(), timeout)
         except asyncio.TimeoutError:
             raise TimeExhausted(