web3 7.11.0__py3-none-any.whl → 7.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ens/async_ens.py +2 -2
- ens/ens.py +2 -2
- ens/utils.py +14 -3
- web3/_utils/abi.py +24 -20
- web3/_utils/batching.py +22 -68
- web3/_utils/caching/request_caching_validation.py +8 -4
- web3/_utils/contract_sources/contract_data/ambiguous_function_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/arrays_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/bytes_contracts.py +5 -5
- web3/_utils/contract_sources/contract_data/constructor_contracts.py +7 -7
- web3/_utils/contract_sources/contract_data/contract_caller_tester.py +3 -3
- web3/_utils/contract_sources/contract_data/emitter_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/event_contracts.py +7 -7
- web3/_utils/contract_sources/contract_data/extended_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/fallback_function_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/function_name_tester_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/math_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/offchain_lookup.py +3 -3
- web3/_utils/contract_sources/contract_data/offchain_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/panic_errors_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/payable_tester.py +3 -3
- web3/_utils/contract_sources/contract_data/receive_function_contracts.py +5 -5
- web3/_utils/contract_sources/contract_data/reflector_contracts.py +3 -3
- web3/_utils/contract_sources/contract_data/revert_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/simple_resolver.py +3 -3
- web3/_utils/contract_sources/contract_data/storage_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/string_contract.py +3 -3
- web3/_utils/contract_sources/contract_data/tuple_contracts.py +5 -5
- web3/_utils/decorators.py +14 -11
- web3/_utils/error_formatters_utils.py +17 -0
- web3/_utils/filters.py +39 -28
- web3/_utils/http_session_manager.py +18 -15
- web3/_utils/method_formatters.py +19 -24
- web3/_utils/module_testing/eth_module.py +107 -47
- web3/_utils/module_testing/web3_module.py +78 -4
- web3/_utils/validation.py +1 -1
- web3/contract/utils.py +20 -35
- web3/eth/async_eth.py +4 -0
- web3/eth/eth.py +15 -2
- web3/manager.py +105 -23
- web3/method.py +17 -9
- web3/providers/async_base.py +15 -1
- web3/providers/base.py +18 -5
- web3/providers/ipc.py +2 -4
- web3/providers/legacy_websocket.py +4 -5
- web3/providers/persistent/async_ipc.py +3 -1
- web3/providers/persistent/persistent.py +110 -40
- web3/providers/persistent/request_processor.py +34 -51
- web3/providers/persistent/subscription_manager.py +13 -7
- web3/providers/rpc/async_rpc.py +7 -7
- web3/providers/rpc/rpc.py +6 -6
- web3/utils/abi.py +1 -1
- web3/utils/subscriptions.py +7 -4
- {web3-7.11.0.dist-info → web3-7.12.0.dist-info}/METADATA +1 -1
- {web3-7.11.0.dist-info → web3-7.12.0.dist-info}/RECORD +58 -58
- {web3-7.11.0.dist-info → web3-7.12.0.dist-info}/WHEEL +1 -1
- {web3-7.11.0.dist-info → web3-7.12.0.dist-info}/licenses/LICENSE +0 -0
- {web3-7.11.0.dist-info → web3-7.12.0.dist-info}/top_level.txt +0 -0
web3/manager.py
CHANGED
@@ -159,7 +159,7 @@ class RequestManager:
         request_func = provider.request_func(
             cast("Web3", self.w3), cast("MiddlewareOnion", self.middleware_onion)
         )
-        self.logger.debug(
+        self.logger.debug("Making request. Method: %s", method)
         return request_func(method, params)

     async def _coro_make_request(
@@ -169,7 +169,7 @@ class RequestManager:
         request_func = await provider.request_func(
             cast("AsyncWeb3", self.w3), cast("MiddlewareOnion", self.middleware_onion)
         )
-        self.logger.debug(
+        self.logger.debug("Making request. Method: %s", method)
         return await request_func(method, params)

     #
@@ -261,7 +261,7 @@ class RequestManager:
         return RequestBatcher(self.w3)

     def _make_batch_request(
-        self, requests_info: List[Tuple[Tuple["RPCEndpoint", Any],
+        self, requests_info: List[Tuple[Tuple["RPCEndpoint", Any], Tuple[Any, ...]]]
     ) -> List[RPCResponse]:
         """
         Make a batch request using the provider
@@ -291,7 +291,7 @@ class RequestManager:
     async def _async_make_batch_request(
         self,
         requests_info: List[
-            Coroutine[Any, Any, Tuple[Tuple["RPCEndpoint", Any],
+            Coroutine[Any, Any, Tuple[Tuple["RPCEndpoint", Any], Tuple[Any]]]
         ],
     ) -> List[RPCResponse]:
         """
@@ -315,13 +315,6 @@ class RequestManager:
         if isinstance(response, list):
             # expected format
             response = cast(List[RPCResponse], response)
-            if isinstance(self.provider, PersistentConnectionProvider):
-                # call _process_response for each response in the batch
-                return [
-                    cast(RPCResponse, await self._process_response(resp))
-                    for resp in response
-                ]
-
             formatted_responses = [
                 self._format_batched_response(info, resp)
                 for info, resp in zip(unpacked_requests_info, response)
@@ -331,6 +324,86 @@ class RequestManager:
             # expect a single response with an error
             raise_error_for_batch_response(response, self.logger)

+    async def _async_send_batch(
+        self, requests: List[Tuple["RPCEndpoint", Any]]
+    ) -> List[RPCRequest]:
+        """
+        Send a batch request via socket.
+        """
+        if not isinstance(self._provider, PersistentConnectionProvider):
+            raise Web3TypeError(
+                "Only providers that maintain an open, persistent connection "
+                "can send batch requests."
+            )
+        send_func = await self._provider.send_batch_func(
+            cast("AsyncWeb3", self.w3),
+            cast("MiddlewareOnion", self.middleware_onion),
+        )
+        self.logger.debug(
+            "Sending batch request to open socket connection: %s",
+            self._provider.get_endpoint_uri_or_ipc_path(),
+        )
+        return await send_func(requests)
+
+    async def _async_recv_batch(self, requests: List[RPCRequest]) -> List[RPCResponse]:
+        """
+        Receive a batch request via socket.
+        """
+        if not isinstance(self._provider, PersistentConnectionProvider):
+            raise Web3TypeError(
+                "Only providers that maintain an open, persistent connection "
+                "can receive batch requests."
+            )
+        recv_func = await self._provider.recv_batch_func(
+            cast("AsyncWeb3", self.w3),
+            cast("MiddlewareOnion", self.middleware_onion),
+        )
+        self.logger.debug(
+            "Receiving batch request from open socket connection: %s",
+            self._provider.get_endpoint_uri_or_ipc_path(),
+        )
+        return await recv_func(requests)
+
+    async def _async_make_socket_batch_request(
+        self,
+        requests_info: List[
+            Coroutine[Any, Any, Tuple[Tuple["RPCEndpoint", Any], Tuple[Any, ...]]]
+        ],
+    ) -> List[RPCResponse]:
+        """
+        Send and receive a batch request via a socket.
+        """
+        if not isinstance(self._provider, PersistentConnectionProvider):
+            raise Web3TypeError(
+                "Only providers that maintain an open, persistent connection "
+                "can send and receive batch requests."
+            )
+
+        unpacked_requests_info = await asyncio.gather(*requests_info)
+        reqs = [req for req, _ in unpacked_requests_info]
+        response_formatters = [resp_f for _, resp_f in unpacked_requests_info]
+
+        requests = await self._async_send_batch(reqs)
+
+        for i, request in enumerate(requests):
+            self._provider._request_processor.cache_request_information(
+                request["id"],
+                request["method"],
+                request["params"],
+                response_formatters=response_formatters[i],
+            )
+
+        responses = await self._async_recv_batch(requests)
+        if isinstance(responses, list):
+            # expected format
+            return [
+                cast(RPCResponse, await self._process_response(resp))
+                for resp in responses
+            ]
+        else:
+            # expect a single response with an error
+            raise_error_for_batch_response(responses, self.logger)
+
     def _format_batched_response(
         self,
         requests_info: Tuple[Tuple[RPCEndpoint, Any], Sequence[Any]],
@@ -366,9 +439,12 @@ class RequestManager:
     ) -> RPCResponse:
         provider = cast(PersistentConnectionProvider, self._provider)
         self.logger.debug(
-            "Making request to open socket connection and waiting for response: "
-
-
+            "Making request to open socket connection and waiting for response: %s,\n"
+            " method: %s,\n"
+            " params: %s",
+            provider.get_endpoint_uri_or_ipc_path(),
+            method,
+            params,
         )
         rpc_request = await self.send(method, params)
         provider._request_processor.cache_request_information(
@@ -388,9 +464,12 @@ class RequestManager:
             middleware_onion,
         )
         self.logger.debug(
-            "Sending request to open socket connection: "
-
-
+            "Sending request to open socket connection: %s,\n"
+            " method: %s,\n"
+            " params: %s",
+            provider.get_endpoint_uri_or_ipc_path(),
+            method,
+            params,
         )
         return await send_func(method, params)

@@ -404,7 +483,8 @@ class RequestManager:
         )
         self.logger.debug(
             "Getting response for request from open socket connection:\n"
-
+            " request: %s",
+            rpc_request,
         )
         response = await recv_func(rpc_request)
         try:
@@ -417,8 +497,8 @@ class RequestManager:
     async def recv(self) -> Union[RPCResponse, FormattedEthSubscriptionResponse]:
         provider = cast(PersistentConnectionProvider, self._provider)
         self.logger.debug(
-            "Getting next response from open socket connection: "
-
+            "Getting next response from open socket connection: %s",
+            provider.get_endpoint_uri_or_ipc_path(),
         )
         # pop from the queue since the listener task is responsible for reading
         # directly from the socket
@@ -501,9 +581,11 @@ class RequestManager:
             # subscription as it comes in
             request_info.subscription_id = subscription_id
             provider.logger.debug(
-                "Caching eth_subscription info:\n
-
-
+                "Caching eth_subscription info:\n"
+                " cache_key=%s,\n"
+                " request_info=%s",
+                cache_key,
+                request_info.__dict__,
             )
             self._request_processor._request_information_cache.cache(
                 cache_key, request_info
web3/method.py
CHANGED
@@ -165,8 +165,7 @@ class Method(Generic[TFunc]):
                 "usually attached to a web3 instance."
             )

-
-        if hasattr(provider, "_is_batching") and provider._is_batching:
+        if module.w3.provider._is_batching:
             if self.json_rpc_method in RPC_METHODS_UNSUPPORTED_DURING_BATCH:
                 raise MethodNotSupported(
                     f"Method `{self.json_rpc_method}` is not supported within a batch "
@@ -182,12 +181,13 @@
     @property
     def method_selector_fn(
         self,
-    ) -> Callable[
+    ) -> Callable[[], RPCEndpoint]:
         """Gets the method selector from the config."""
-
-
-
-
+        method = self.json_rpc_method
+        if callable(method):
+            return method
+        elif isinstance(method, str):
+            return lambda: method
         raise Web3ValueError(
             "``json_rpc_method`` config invalid. May be a string or function"
         )
@@ -241,17 +241,25 @@

 class DeprecatedMethod:
     def __init__(
-        self,
+        self,
+        method: Method[Callable[..., Any]],
+        old_name: Optional[str] = None,
+        new_name: Optional[str] = None,
+        msg: Optional[str] = None,
     ) -> None:
         self.method = method
         self.old_name = old_name
         self.new_name = new_name
+        self.msg = msg

     def __get__(
         self, obj: Optional["Module"] = None, obj_type: Optional[Type["Module"]] = None
     ) -> Any:
+        message = f"{self.old_name} is deprecated in favor of {self.new_name}"
+        if self.msg is not None:
+            message = self.msg
         warnings.warn(
-
+            message,
             category=DeprecationWarning,
             stacklevel=2,
         )
web3/providers/async_base.py
CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import contextvars
 import itertools
 import logging
 from typing import (
@@ -61,6 +62,9 @@ if TYPE_CHECKING:
         AsyncWeb3,
         WebSocketProvider,
     )
+    from web3._utils.batching import (  # noqa: F401
+        RequestBatcher,
+    )
     from web3.providers.persistent import (  # noqa: F401
         RequestProcessor,
     )
@@ -94,12 +98,19 @@ class AsyncBaseProvider:
         self.cache_allowed_requests = cache_allowed_requests
         self.cacheable_requests = cacheable_requests or CACHEABLE_REQUESTS
         self.request_cache_validation_threshold = request_cache_validation_threshold
-
+
+        self._batching_context: contextvars.ContextVar[
+            Optional["RequestBatcher[Any]"]
+        ] = contextvars.ContextVar("batching_context", default=None)
         self._batch_request_func_cache: Tuple[
             Tuple[Middleware, ...],
             Callable[..., Coroutine[Any, Any, Union[List[RPCResponse], RPCResponse]]],
         ] = (None, None)

+    @property
+    def _is_batching(self) -> bool:
+        return self._batching_context.get() is not None
+
     async def request_func(
         self, async_w3: "AsyncWeb3", middleware_onion: MiddlewareOnion
     ) -> Callable[..., Coroutine[Any, Any, RPCResponse]]:
@@ -240,3 +251,6 @@ class AsyncJSONBaseProvider(AsyncBaseProvider):
             )
             + b"]"
         )
+
+    def encode_batch_request_dicts(self, request_dicts: List[RPCRequest]) -> bytes:
+        return b"[" + b",".join(self.encode_rpc_dict(d) for d in request_dicts) + b"]"
web3/providers/base.py
CHANGED
@@ -1,3 +1,4 @@
+import contextvars
 import itertools
 import logging
 import threading
@@ -50,6 +51,9 @@ from web3.utils import (

 if TYPE_CHECKING:
     from web3 import Web3  # noqa: F401
+    from web3._utils.batching import (
+        RequestBatcher,
+    )


 class BaseProvider:
@@ -81,6 +85,20 @@ class BaseProvider:
         self.cacheable_requests = cacheable_requests or CACHEABLE_REQUESTS
         self.request_cache_validation_threshold = request_cache_validation_threshold

+        self._batching_context: contextvars.ContextVar[
+            Optional["RequestBatcher[Any]"]
+        ] = contextvars.ContextVar("batching_context", default=None)
+        self._batch_request_func_cache: Tuple[
+            Tuple[Middleware, ...], Callable[..., Union[List[RPCResponse], RPCResponse]]
+        ] = (None, None)
+
+    @property
+    def _is_batching(self) -> bool:
+        """
+        Check if the provider is currently batching requests.
+        """
+        return self._batching_context.get() is not None
+
     def request_func(
         self, w3: "Web3", middleware_onion: MiddlewareOnion
     ) -> Callable[..., RPCResponse]:
@@ -120,11 +138,6 @@ class JSONBaseProvider(BaseProvider):
         super().__init__(**kwargs)
         self.request_counter = itertools.count()

-        self._is_batching: bool = False
-        self._batch_request_func_cache: Tuple[
-            Tuple[Middleware, ...], Callable[..., Union[List[RPCResponse], RPCResponse]]
-        ] = (None, None)
-
     def encode_rpc_request(self, method: RPCEndpoint, params: Any) -> bytes:
         rpc_dict = {
             "jsonrpc": "2.0",
web3/providers/ipc.py
CHANGED
@@ -30,7 +30,6 @@ from web3.types import (
 )

 from .._utils.batching import (
-    batching_context,
     sort_batch_response_by_response_ids,
 )
 from .._utils.caching import (
@@ -197,16 +196,15 @@ class IPCProvider(JSONBaseProvider):
     @handle_request_caching
     def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
         self.logger.debug(
-
+            "Making request IPC. Path: %s, Method: %s", self.ipc_path, method
         )
         request = self.encode_rpc_request(method, params)
         return self._make_request(request)

-    @batching_context
     def make_batch_request(
         self, requests: List[Tuple[RPCEndpoint, Any]]
     ) -> List[RPCResponse]:
-        self.logger.debug(
+        self.logger.debug("Making batch request IPC. Path: %s", self.ipc_path)
         request_data = self.encode_batch_rpc_request(requests)
         response = cast(List[RPCResponse], self._make_request(request_data))
         return sort_batch_response_by_response_ids(response)
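
Besides dropping the `@batching_context` decorator (the batching flag now lives on the provider itself), the debug calls switch from eagerly built strings to lazy %-style arguments. A generic sketch of the difference, using placeholder values and only the standard library:

    # Lazy logging: pass format arguments to the logger instead of pre-building
    # the string, so formatting only happens when DEBUG is actually enabled.
    import logging

    logger = logging.getLogger("example.provider")
    ipc_path = "/tmp/geth.ipc"   # placeholder
    method = "eth_blockNumber"   # placeholder

    # Eager: the f-string is rendered even when DEBUG logging is disabled.
    logger.debug(f"Making request IPC. Path: {ipc_path}, Method: {method}")

    # Lazy: arguments are only interpolated if a handler will emit the record.
    logger.debug("Making request IPC. Path: %s, Method: %s", ipc_path, method)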
web3/providers/legacy_websocket.py
CHANGED
@@ -27,7 +27,6 @@ from websockets.legacy.client import (
 )

 from web3._utils.batching import (
-    batching_context,
     sort_batch_response_by_response_ids,
 )
 from web3._utils.caching import (
@@ -136,7 +135,7 @@ class LegacyWebSocketProvider(JSONBaseProvider):
     @handle_request_caching
     def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
         self.logger.debug(
-
+            "Making request WebSocket. URI: %s, Method: %s", self.endpoint_uri, method
         )
         request_data = self.encode_rpc_request(method, params)
         future = asyncio.run_coroutine_threadsafe(
@@ -144,13 +143,13 @@ class LegacyWebSocketProvider(JSONBaseProvider):
         )
         return future.result()

-    @batching_context
     def make_batch_request(
         self, requests: List[Tuple[RPCEndpoint, Any]]
     ) -> List[RPCResponse]:
         self.logger.debug(
-
-
+            "Making batch request WebSocket. URI: %s, Methods: %s",
+            self.endpoint_uri,
+            requests,
         )
         request_data = self.encode_batch_rpc_request(requests)
         future = asyncio.run_coroutine_threadsafe(
web3/providers/persistent/async_ipc.py
CHANGED
@@ -110,7 +110,9 @@ class AsyncIPCProvider(PersistentConnectionProvider):
             raise

         if not data:
-            raise PersistentConnectionClosedOK(
+            raise PersistentConnectionClosedOK(
+                user_message="Socket reader received end of stream."
+            )
         return self.decode_rpc_response(data)

     # -- private methods -- #