redis 6.4.0__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. redis/__init__.py +1 -1
  2. redis/_parsers/base.py +193 -8
  3. redis/_parsers/helpers.py +64 -6
  4. redis/_parsers/hiredis.py +16 -10
  5. redis/_parsers/resp3.py +11 -5
  6. redis/asyncio/client.py +65 -8
  7. redis/asyncio/cluster.py +57 -5
  8. redis/asyncio/connection.py +62 -2
  9. redis/asyncio/http/__init__.py +0 -0
  10. redis/asyncio/http/http_client.py +265 -0
  11. redis/asyncio/multidb/__init__.py +0 -0
  12. redis/asyncio/multidb/client.py +530 -0
  13. redis/asyncio/multidb/command_executor.py +339 -0
  14. redis/asyncio/multidb/config.py +210 -0
  15. redis/asyncio/multidb/database.py +69 -0
  16. redis/asyncio/multidb/event.py +84 -0
  17. redis/asyncio/multidb/failover.py +125 -0
  18. redis/asyncio/multidb/failure_detector.py +38 -0
  19. redis/asyncio/multidb/healthcheck.py +285 -0
  20. redis/background.py +204 -0
  21. redis/cache.py +1 -0
  22. redis/client.py +97 -16
  23. redis/cluster.py +14 -3
  24. redis/commands/core.py +348 -313
  25. redis/commands/helpers.py +0 -20
  26. redis/commands/json/commands.py +2 -2
  27. redis/commands/search/__init__.py +2 -2
  28. redis/commands/search/aggregation.py +24 -26
  29. redis/commands/search/commands.py +10 -10
  30. redis/commands/search/field.py +2 -2
  31. redis/commands/search/query.py +23 -23
  32. redis/commands/vectorset/__init__.py +1 -1
  33. redis/commands/vectorset/commands.py +43 -25
  34. redis/commands/vectorset/utils.py +40 -4
  35. redis/connection.py +1257 -83
  36. redis/data_structure.py +81 -0
  37. redis/event.py +84 -10
  38. redis/exceptions.py +8 -0
  39. redis/http/__init__.py +0 -0
  40. redis/http/http_client.py +425 -0
  41. redis/maint_notifications.py +810 -0
  42. redis/multidb/__init__.py +0 -0
  43. redis/multidb/circuit.py +144 -0
  44. redis/multidb/client.py +526 -0
  45. redis/multidb/command_executor.py +350 -0
  46. redis/multidb/config.py +207 -0
  47. redis/multidb/database.py +130 -0
  48. redis/multidb/event.py +89 -0
  49. redis/multidb/exception.py +17 -0
  50. redis/multidb/failover.py +125 -0
  51. redis/multidb/failure_detector.py +104 -0
  52. redis/multidb/healthcheck.py +282 -0
  53. redis/retry.py +14 -1
  54. redis/utils.py +34 -0
  55. {redis-6.4.0.dist-info → redis-7.0.0.dist-info}/METADATA +7 -4
  56. redis-7.0.0.dist-info/RECORD +105 -0
  57. redis-6.4.0.dist-info/RECORD +0 -78
  58. {redis-6.4.0.dist-info → redis-7.0.0.dist-info}/WHEEL +0 -0
  59. {redis-6.4.0.dist-info → redis-7.0.0.dist-info}/licenses/LICENSE +0 -0
redis/asyncio/cluster.py CHANGED
@@ -86,10 +86,11 @@ from redis.utils import (
 )
 
 if SSL_AVAILABLE:
-    from ssl import TLSVersion, VerifyMode
+    from ssl import TLSVersion, VerifyFlags, VerifyMode
 else:
     TLSVersion = None
     VerifyMode = None
+    VerifyFlags = None
 
 TargetNodesT = TypeVar(
     "TargetNodesT", str, "ClusterNode", List["ClusterNode"], Dict[Any, "ClusterNode"]
@@ -299,6 +300,8 @@ class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommand
         ssl_ca_certs: Optional[str] = None,
         ssl_ca_data: Optional[str] = None,
         ssl_cert_reqs: Union[str, VerifyMode] = "required",
+        ssl_include_verify_flags: Optional[List[VerifyFlags]] = None,
+        ssl_exclude_verify_flags: Optional[List[VerifyFlags]] = None,
         ssl_certfile: Optional[str] = None,
         ssl_check_hostname: bool = True,
         ssl_keyfile: Optional[str] = None,
@@ -358,6 +361,8 @@ class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommand
             "ssl_ca_certs": ssl_ca_certs,
             "ssl_ca_data": ssl_ca_data,
             "ssl_cert_reqs": ssl_cert_reqs,
+            "ssl_include_verify_flags": ssl_include_verify_flags,
+            "ssl_exclude_verify_flags": ssl_exclude_verify_flags,
             "ssl_certfile": ssl_certfile,
             "ssl_check_hostname": ssl_check_hostname,
             "ssl_keyfile": ssl_keyfile,
@@ -404,6 +409,7 @@ class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommand
         else:
             self._event_dispatcher = event_dispatcher
 
+        self.startup_nodes = startup_nodes
         self.nodes_manager = NodesManager(
             startup_nodes,
             require_full_coverage,
@@ -431,6 +437,12 @@ class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommand
         self._initialize = True
         self._lock: Optional[asyncio.Lock] = None
 
+        # When used as an async context manager, we need to increment and decrement
+        # a usage counter so that we can close the connection pool when no one is
+        # using the client.
+        self._usage_counter = 0
+        self._usage_lock = asyncio.Lock()
+
     async def initialize(self) -> "RedisCluster":
         """Get all nodes from startup nodes & creates connections if not initialized."""
         if self._initialize:
@@ -467,10 +479,47 @@ class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommand
         await self.aclose()
 
     async def __aenter__(self) -> "RedisCluster":
-        return await self.initialize()
+        """
+        Async context manager entry. Increments a usage counter so that the
+        connection pool is only closed (via aclose()) when no context is using
+        the client.
+        """
+        await self._increment_usage()
+        try:
+            # Initialize the client (i.e. establish connection, etc.)
+            return await self.initialize()
+        except Exception:
+            # If initialization fails, decrement the counter to keep it in sync
+            await self._decrement_usage()
+            raise
 
-    async def __aexit__(self, exc_type: None, exc_value: None, traceback: None) -> None:
-        await self.aclose()
+    async def _increment_usage(self) -> int:
+        """
+        Helper coroutine to increment the usage counter while holding the lock.
+        Returns the new value of the usage counter.
+        """
+        async with self._usage_lock:
+            self._usage_counter += 1
+            return self._usage_counter
+
+    async def _decrement_usage(self) -> int:
+        """
+        Helper coroutine to decrement the usage counter while holding the lock.
+        Returns the new value of the usage counter.
+        """
+        async with self._usage_lock:
+            self._usage_counter -= 1
+            return self._usage_counter
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        """
+        Async context manager exit. Decrements a usage counter. If this is the
+        last exit (counter becomes zero), the client closes its connection pool.
+        """
+        current_usage = await asyncio.shield(self._decrement_usage())
+        if current_usage == 0:
+            # This was the last active context, so disconnect the pool.
+            await asyncio.shield(self.aclose())
 
     def __await__(self) -> Generator[Any, None, "RedisCluster"]:
         return self.initialize().__await__()
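Note: with the reference-counted context manager, one client instance can be shared by several tasks, and aclose() only runs when the last `async with` block exits. A minimal usage sketch (the key, value, and endpoint are illustrative):

    import asyncio
    from redis.asyncio.cluster import RedisCluster

    async def worker(rc: RedisCluster) -> None:
        async with rc:  # increments the usage counter
            await rc.set("example-key", "value")
        # the pool is closed only after the last context exits

    async def main() -> None:
        rc = RedisCluster(host="cluster.example.com", port=6379)
        await asyncio.gather(worker(rc), worker(rc))

    asyncio.run(main())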
@@ -2205,7 +2254,10 @@ class TransactionStrategy(AbstractStrategy):
             await self._pipe.cluster_client.nodes_manager.initialize()
             self.reinitialize_counter = 0
         else:
-            self._pipe.cluster_client.nodes_manager.update_moved_exception(error)
+            if isinstance(error, AskError):
+                self._pipe.cluster_client.nodes_manager.update_moved_exception(
+                    error
+                )
 
         self._executing = False
 
redis/asyncio/connection.py CHANGED
@@ -30,11 +30,12 @@ from ..utils import SSL_AVAILABLE
 
 if SSL_AVAILABLE:
     import ssl
-    from ssl import SSLContext, TLSVersion
+    from ssl import SSLContext, TLSVersion, VerifyFlags
 else:
     ssl = None
     TLSVersion = None
     SSLContext = None
+    VerifyFlags = None
 
 from ..auth.token import TokenInterface
 from ..event import AsyncAfterConnectionReleasedEvent, EventDispatcher
@@ -212,6 +213,7 @@ class AbstractConnection:
         self._connect_callbacks: List[weakref.WeakMethod[ConnectCallbackT]] = []
         self._buffer_cutoff = 6000
         self._re_auth_token: Optional[TokenInterface] = None
+        self._should_reconnect = False
 
         try:
             p = int(protocol)
@@ -342,6 +344,12 @@ class AbstractConnection:
         if task and inspect.isawaitable(task):
             await task
 
+    def mark_for_reconnect(self):
+        self._should_reconnect = True
+
+    def should_reconnect(self):
+        return self._should_reconnect
+
     @abstractmethod
     async def _connect(self):
         pass
@@ -793,6 +801,8 @@ class SSLConnection(Connection):
         ssl_keyfile: Optional[str] = None,
         ssl_certfile: Optional[str] = None,
         ssl_cert_reqs: Union[str, ssl.VerifyMode] = "required",
+        ssl_include_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
+        ssl_exclude_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
         ssl_ca_certs: Optional[str] = None,
         ssl_ca_data: Optional[str] = None,
         ssl_check_hostname: bool = True,
@@ -807,6 +817,8 @@ class SSLConnection(Connection):
             keyfile=ssl_keyfile,
             certfile=ssl_certfile,
             cert_reqs=ssl_cert_reqs,
+            include_verify_flags=ssl_include_verify_flags,
+            exclude_verify_flags=ssl_exclude_verify_flags,
             ca_certs=ssl_ca_certs,
             ca_data=ssl_ca_data,
             check_hostname=ssl_check_hostname,
@@ -832,6 +844,14 @@ class SSLConnection(Connection):
     def cert_reqs(self):
         return self.ssl_context.cert_reqs
 
+    @property
+    def include_verify_flags(self):
+        return self.ssl_context.include_verify_flags
+
+    @property
+    def exclude_verify_flags(self):
+        return self.ssl_context.exclude_verify_flags
+
     @property
     def ca_certs(self):
         return self.ssl_context.ca_certs
@@ -854,6 +874,8 @@ class RedisSSLContext:
         "keyfile",
         "certfile",
         "cert_reqs",
+        "include_verify_flags",
+        "exclude_verify_flags",
         "ca_certs",
         "ca_data",
         "context",
@@ -867,6 +889,8 @@ class RedisSSLContext:
         keyfile: Optional[str] = None,
         certfile: Optional[str] = None,
         cert_reqs: Optional[Union[str, ssl.VerifyMode]] = None,
+        include_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
+        exclude_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
        ca_certs: Optional[str] = None,
         ca_data: Optional[str] = None,
         check_hostname: bool = False,
@@ -892,6 +916,8 @@ class RedisSSLContext:
                 )
             cert_reqs = CERT_REQS[cert_reqs]
         self.cert_reqs = cert_reqs
+        self.include_verify_flags = include_verify_flags
+        self.exclude_verify_flags = exclude_verify_flags
         self.ca_certs = ca_certs
         self.ca_data = ca_data
         self.check_hostname = (
@@ -906,6 +932,12 @@ class RedisSSLContext:
             context = ssl.create_default_context()
             context.check_hostname = self.check_hostname
             context.verify_mode = self.cert_reqs
+            if self.include_verify_flags:
+                for flag in self.include_verify_flags:
+                    context.verify_flags |= flag
+            if self.exclude_verify_flags:
+                for flag in self.exclude_verify_flags:
+                    context.verify_flags &= ~flag
             if self.certfile and self.keyfile:
                 context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile)
             if self.ca_certs or self.ca_data:
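Note: the include/exclude lists are OR-ed into and masked out of the context's verify_flags, so the effect is the same as adjusting an SSLContext directly with the standard library:

    import ssl

    ctx = ssl.create_default_context()
    # equivalent to ssl_include_verify_flags=[VERIFY_X509_STRICT]
    ctx.verify_flags |= ssl.VerifyFlags.VERIFY_X509_STRICT
    # equivalent to ssl_exclude_verify_flags=[VERIFY_X509_PARTIAL_CHAIN]
    ctx.verify_flags &= ~ssl.VerifyFlags.VERIFY_X509_PARTIAL_CHAIN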
@@ -953,6 +985,20 @@ def to_bool(value) -> Optional[bool]:
     return bool(value)
 
 
+def parse_ssl_verify_flags(value):
+    # flags are passed in as a string representation of a list,
+    # e.g. VERIFY_X509_STRICT, VERIFY_X509_PARTIAL_CHAIN
+    verify_flags_str = value.replace("[", "").replace("]", "")
+
+    verify_flags = []
+    for flag in verify_flags_str.split(","):
+        flag = flag.strip()
+        if not hasattr(VerifyFlags, flag):
+            raise ValueError(f"Invalid ssl verify flag: {flag}")
+        verify_flags.append(getattr(VerifyFlags, flag))
+    return verify_flags
+
+
 URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] = MappingProxyType(
     {
         "db": int,
@@ -963,6 +1009,8 @@ URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] = MappingProxyTy
         "max_connections": int,
         "health_check_interval": int,
         "ssl_check_hostname": to_bool,
+        "ssl_include_verify_flags": parse_ssl_verify_flags,
+        "ssl_exclude_verify_flags": parse_ssl_verify_flags,
         "timeout": float,
     }
 )
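Note: because parse_ssl_verify_flags is registered as a URL query parser, the same flags can be supplied in a rediss:// URL; the parser strips the brackets, splits on commas, and looks each name up on ssl.VerifyFlags, raising ValueError for unknown names. A hedged sketch (the host and database are placeholders):

    from redis.asyncio import Redis

    r = Redis.from_url(
        "rediss://redis.example.com:6379/0"
        "?ssl_include_verify_flags=[VERIFY_X509_STRICT,VERIFY_X509_PARTIAL_CHAIN]"
    )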
@@ -1021,6 +1069,7 @@ def parse_url(url: str) -> ConnectKwargs:
 
     if parsed.scheme == "rediss":
         kwargs["connection_class"] = SSLConnection
+
     else:
         valid_schemes = "redis://, rediss://, unix://"
         raise ValueError(
@@ -1198,6 +1247,9 @@ class ConnectionPool:
         # Connections should always be returned to the correct pool,
         # not doing so is an error that will cause an exception here.
         self._in_use_connections.remove(connection)
+        if connection.should_reconnect():
+            await connection.disconnect()
+
         self._available_connections.append(connection)
         await self._event_dispatcher.dispatch_async(
             AsyncAfterConnectionReleasedEvent(connection)
@@ -1225,6 +1277,14 @@ class ConnectionPool:
         if exc:
             raise exc
 
+    async def update_active_connections_for_reconnect(self):
+        """
+        Mark all active connections for reconnect.
+        """
+        async with self._lock:
+            for conn in self._in_use_connections:
+                conn.mark_for_reconnect()
+
     async def aclose(self) -> None:
         """Close the pool, disconnecting all connections"""
         await self.disconnect()
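Note: update_active_connections_for_reconnect() only flags connections; the actual disconnect happens lazily in release(), so in-flight commands are not interrupted. A minimal sketch, assuming an existing async ConnectionPool named pool:

    # Flag every connection currently checked out of the pool. Each one is
    # disconnected when its owner releases it, and the next get_connection()
    # call re-establishes the socket.
    await pool.update_active_connections_for_reconnect()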
@@ -1296,7 +1356,7 @@ class BlockingConnectionPool(ConnectionPool):
     def __init__(
         self,
         max_connections: int = 50,
-        timeout: Optional[int] = 20,
+        timeout: Optional[float] = 20,
         connection_class: Type[AbstractConnection] = Connection,
         queue_class: Type[asyncio.Queue] = asyncio.LifoQueue,  # deprecated
         **connection_kwargs,
redis/asyncio/http/__init__.py (file without changes)
redis/asyncio/http/http_client.py ADDED
@@ -0,0 +1,265 @@
+import asyncio
+from abc import ABC, abstractmethod
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, Mapping, Optional, Union
+
+from redis.http.http_client import HttpClient, HttpResponse
+
+DEFAULT_USER_AGENT = "HttpClient/1.0 (+https://example.invalid)"
+DEFAULT_TIMEOUT = 30.0
+RETRY_STATUS_CODES = {429, 500, 502, 503, 504}
+
+
+class AsyncHTTPClient(ABC):
+    @abstractmethod
+    async def get(
+        self,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        """
+        Invoke HTTP GET request."""
+        pass
+
+    @abstractmethod
+    async def delete(
+        self,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        """
+        Invoke HTTP DELETE request."""
+        pass
+
+    @abstractmethod
+    async def post(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        """
+        Invoke HTTP POST request."""
+        pass
+
+    @abstractmethod
+    async def put(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        """
+        Invoke HTTP PUT request."""
+        pass
+
+    @abstractmethod
+    async def patch(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        """
+        Invoke HTTP PATCH request."""
+        pass
+
+    @abstractmethod
+    async def request(
+        self,
+        method: str,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        body: Optional[Union[bytes, str]] = None,
+        timeout: Optional[float] = None,
+    ) -> HttpResponse:
+        """
+        Invoke HTTP request with given method."""
+        pass
+
+
+class AsyncHTTPClientWrapper(AsyncHTTPClient):
+    """
+    An async wrapper around sync HTTP client with thread pool execution.
+    """
+
+    def __init__(self, client: HttpClient, max_workers: int = 10) -> None:
+        """
+        Initialize a new HTTP client instance.
+
+        Args:
+            client: Sync HTTP client instance.
+            max_workers: Maximum number of concurrent requests.
+
+        The client supports both regular HTTPS with server verification and mutual TLS
+        authentication. For server verification, provide CA certificate information via
+        ca_file, ca_path or ca_data. For mutual TLS, additionally provide a client
+        certificate and key via client_cert_file and client_key_file.
+        """
+        self.client = client
+        self._executor = ThreadPoolExecutor(max_workers=max_workers)
+
+    async def get(
+        self,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor, self.client.get, path, params, headers, timeout, expect_json
+        )
+
+    async def delete(
+        self,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor,
+            self.client.delete,
+            path,
+            params,
+            headers,
+            timeout,
+            expect_json,
+        )
+
+    async def post(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor,
+            self.client.post,
+            path,
+            json_body,
+            data,
+            params,
+            headers,
+            timeout,
+            expect_json,
+        )
+
+    async def put(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor,
+            self.client.put,
+            path,
+            json_body,
+            data,
+            params,
+            headers,
+            timeout,
+            expect_json,
+        )
+
+    async def patch(
+        self,
+        path: str,
+        json_body: Optional[Any] = None,
+        data: Optional[Union[bytes, str]] = None,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        timeout: Optional[float] = None,
+        expect_json: bool = True,
+    ) -> Union[HttpResponse, Any]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor,
+            self.client.patch,
+            path,
+            json_body,
+            data,
+            params,
+            headers,
+            timeout,
+            expect_json,
+        )
+
+    async def request(
+        self,
+        method: str,
+        path: str,
+        params: Optional[
+            Mapping[str, Union[None, str, int, float, bool, list, tuple]]
+        ] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        body: Optional[Union[bytes, str]] = None,
+        timeout: Optional[float] = None,
+    ) -> HttpResponse:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(
+            self._executor,
+            self.client.request,
+            method,
+            path,
+            params,
+            headers,
+            body,
+            timeout,
+        )
redis/asyncio/multidb/__init__.py (file without changes)