redis 5.3.0b5__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. redis/__init__.py +2 -11
  2. redis/_parsers/base.py +14 -2
  3. redis/_parsers/resp3.py +2 -2
  4. redis/asyncio/client.py +102 -82
  5. redis/asyncio/cluster.py +147 -102
  6. redis/asyncio/connection.py +77 -24
  7. redis/asyncio/lock.py +26 -5
  8. redis/asyncio/retry.py +12 -0
  9. redis/asyncio/sentinel.py +11 -1
  10. redis/asyncio/utils.py +1 -1
  11. redis/auth/token.py +6 -2
  12. redis/backoff.py +15 -0
  13. redis/client.py +106 -98
  14. redis/cluster.py +208 -79
  15. redis/commands/cluster.py +1 -11
  16. redis/commands/core.py +219 -207
  17. redis/commands/helpers.py +19 -76
  18. redis/commands/json/__init__.py +1 -1
  19. redis/commands/redismodules.py +5 -17
  20. redis/commands/search/aggregation.py +3 -1
  21. redis/commands/search/commands.py +43 -16
  22. redis/commands/search/dialect.py +3 -0
  23. redis/commands/search/profile_information.py +14 -0
  24. redis/commands/search/query.py +5 -1
  25. redis/commands/timeseries/__init__.py +1 -1
  26. redis/commands/vectorset/__init__.py +46 -0
  27. redis/commands/vectorset/commands.py +367 -0
  28. redis/commands/vectorset/utils.py +94 -0
  29. redis/connection.py +78 -29
  30. redis/exceptions.py +4 -1
  31. redis/lock.py +24 -4
  32. redis/ocsp.py +2 -1
  33. redis/retry.py +12 -0
  34. redis/sentinel.py +3 -1
  35. redis/utils.py +114 -1
  36. {redis-5.3.0b5.dist-info → redis-6.0.0.dist-info}/METADATA +57 -23
  37. redis-6.0.0.dist-info/RECORD +78 -0
  38. {redis-5.3.0b5.dist-info → redis-6.0.0.dist-info}/WHEEL +1 -2
  39. redis/commands/graph/__init__.py +0 -263
  40. redis/commands/graph/commands.py +0 -313
  41. redis/commands/graph/edge.py +0 -91
  42. redis/commands/graph/exceptions.py +0 -3
  43. redis/commands/graph/execution_plan.py +0 -211
  44. redis/commands/graph/node.py +0 -88
  45. redis/commands/graph/path.py +0 -78
  46. redis/commands/graph/query_result.py +0 -588
  47. redis-5.3.0b5.dist-info/RECORD +0 -82
  48. redis-5.3.0b5.dist-info/top_level.txt +0 -1
  49. /redis/commands/search/{indexDefinition.py → index_definition.py} +0 -0
  50. {redis-5.3.0b5.dist-info → redis-6.0.0.dist-info/licenses}/LICENSE +0 -0
redis/asyncio/connection.py CHANGED
@@ -3,7 +3,6 @@ import copy
 import enum
 import inspect
 import socket
-import ssl
 import sys
 import warnings
 import weakref
@@ -27,9 +26,19 @@ from typing import (
 )
 from urllib.parse import ParseResult, parse_qs, unquote, urlparse

+from ..utils import SSL_AVAILABLE
+
+if SSL_AVAILABLE:
+    import ssl
+    from ssl import SSLContext, TLSVersion
+else:
+    ssl = None
+    TLSVersion = None
+    SSLContext = None
+
 from ..auth.token import TokenInterface
 from ..event import AsyncAfterConnectionReleasedEvent, EventDispatcher
-from ..utils import format_error_message
+from ..utils import deprecated_args, format_error_message

 # the functionality is available in 3.11.x but has a major issue before
 # 3.11.3. See https://github.com/redis/redis-py/issues/2633
@@ -284,6 +293,9 @@ class AbstractConnection:

     async def connect(self):
         """Connects to the Redis server if not already connected"""
+        await self.connect_check_health(check_health=True)
+
+    async def connect_check_health(self, check_health: bool = True):
         if self.is_connected:
             return
         try:
@@ -302,7 +314,7 @@
         try:
             if not self.redis_connect_func:
                 # Use the default on_connect function
-                await self.on_connect()
+                await self.on_connect_check_health(check_health=check_health)
             else:
                 # Use the passed function redis_connect_func
                 (
@@ -341,6 +353,9 @@

     async def on_connect(self) -> None:
         """Initialize the connection, authenticate and select a database"""
+        await self.on_connect_check_health(check_health=True)
+
+    async def on_connect_check_health(self, check_health: bool = True) -> None:
         self._parser.on_connect(self)
         parser = self._parser

@@ -363,7 +378,11 @@
                 self._parser.on_connect(self)
                 if len(auth_args) == 1:
                     auth_args = ["default", auth_args[0]]
-                await self.send_command("HELLO", self.protocol, "AUTH", *auth_args)
+                # avoid checking health here -- PING will fail if we try
+                # to check the health prior to the AUTH
+                await self.send_command(
+                    "HELLO", self.protocol, "AUTH", *auth_args, check_health=False
+                )
                 response = await self.read_response()
                 if response.get(b"proto") != int(self.protocol) and response.get(
                     "proto"
@@ -394,7 +413,7 @@
                 # update cluster exception classes
                 self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES
                 self._parser.on_connect(self)
-            await self.send_command("HELLO", self.protocol)
+            await self.send_command("HELLO", self.protocol, check_health=check_health)
             response = await self.read_response()
             # if response.get(b"proto") != self.protocol and response.get(
             #     "proto"
@@ -403,18 +422,35 @@

         # if a client_name is given, set it
         if self.client_name:
-            await self.send_command("CLIENT", "SETNAME", self.client_name)
+            await self.send_command(
+                "CLIENT",
+                "SETNAME",
+                self.client_name,
+                check_health=check_health,
+            )
             if str_if_bytes(await self.read_response()) != "OK":
                 raise ConnectionError("Error setting client name")

         # set the library name and version, pipeline for lower startup latency
         if self.lib_name:
-            await self.send_command("CLIENT", "SETINFO", "LIB-NAME", self.lib_name)
+            await self.send_command(
+                "CLIENT",
+                "SETINFO",
+                "LIB-NAME",
+                self.lib_name,
+                check_health=check_health,
+            )
         if self.lib_version:
-            await self.send_command("CLIENT", "SETINFO", "LIB-VER", self.lib_version)
+            await self.send_command(
+                "CLIENT",
+                "SETINFO",
+                "LIB-VER",
+                self.lib_version,
+                check_health=check_health,
+            )
         # if a database is specified, switch to it. Also pipeline this
         if self.db:
-            await self.send_command("SELECT", self.db)
+            await self.send_command("SELECT", self.db, check_health=check_health)

         # read responses from pipeline
         for _ in (sent for sent in (self.lib_name, self.lib_version) if sent):
@@ -476,8 +512,8 @@
         self, command: Union[bytes, str, Iterable[bytes]], check_health: bool = True
     ) -> None:
         if not self.is_connected:
-            await self.connect()
-        elif check_health:
+            await self.connect_check_health(check_health=False)
+        if check_health:
             await self.check_health()

         try:
@@ -755,14 +791,17 @@ class SSLConnection(Connection):
         self,
         ssl_keyfile: Optional[str] = None,
         ssl_certfile: Optional[str] = None,
-        ssl_cert_reqs: str = "required",
+        ssl_cert_reqs: Union[str, ssl.VerifyMode] = "required",
         ssl_ca_certs: Optional[str] = None,
         ssl_ca_data: Optional[str] = None,
-        ssl_check_hostname: bool = False,
-        ssl_min_version: Optional[ssl.TLSVersion] = None,
+        ssl_check_hostname: bool = True,
+        ssl_min_version: Optional[TLSVersion] = None,
         ssl_ciphers: Optional[str] = None,
         **kwargs,
     ):
+        if not SSL_AVAILABLE:
+            raise RedisError("Python wasn't built with SSL support")
+
         self.ssl_context: RedisSSLContext = RedisSSLContext(
             keyfile=ssl_keyfile,
             certfile=ssl_certfile,
@@ -826,19 +865,22 @@ class RedisSSLContext:
         self,
         keyfile: Optional[str] = None,
         certfile: Optional[str] = None,
-        cert_reqs: Optional[str] = None,
+        cert_reqs: Optional[Union[str, ssl.VerifyMode]] = None,
         ca_certs: Optional[str] = None,
         ca_data: Optional[str] = None,
         check_hostname: bool = False,
-        min_version: Optional[ssl.TLSVersion] = None,
+        min_version: Optional[TLSVersion] = None,
         ciphers: Optional[str] = None,
     ):
+        if not SSL_AVAILABLE:
+            raise RedisError("Python wasn't built with SSL support")
+
         self.keyfile = keyfile
         self.certfile = certfile
         if cert_reqs is None:
-            self.cert_reqs = ssl.CERT_NONE
+            cert_reqs = ssl.CERT_NONE
         elif isinstance(cert_reqs, str):
-            CERT_REQS = {
+            CERT_REQS = {  # noqa: N806
                 "none": ssl.CERT_NONE,
                 "optional": ssl.CERT_OPTIONAL,
                 "required": ssl.CERT_REQUIRED,
@@ -847,15 +889,16 @@
                 raise RedisError(
                     f"Invalid SSL Certificate Requirements Flag: {cert_reqs}"
                 )
-            self.cert_reqs = CERT_REQS[cert_reqs]
+            cert_reqs = CERT_REQS[cert_reqs]
+        self.cert_reqs = cert_reqs
         self.ca_certs = ca_certs
         self.ca_data = ca_data
         self.check_hostname = check_hostname
         self.min_version = min_version
         self.ciphers = ciphers
-        self.context: Optional[ssl.SSLContext] = None
+        self.context: Optional[SSLContext] = None

-    def get(self) -> ssl.SSLContext:
+    def get(self) -> SSLContext:
         if not self.context:
             context = ssl.create_default_context()
             context.check_hostname = self.check_hostname
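
With the changes above, ssl_check_hostname now defaults to True for SSLConnection, and both SSLConnection and RedisSSLContext raise RedisError when the Python interpreter was built without SSL support. A minimal illustrative sketch of a TLS connection with the async client; the host, port, and CA path are placeholders, and hostname verification is disabled explicitly only to show the new default:

    import asyncio
    from redis.asyncio import Redis

    async def main():
        # In 6.0.0 hostname verification is on by default for TLS connections.
        r = Redis(
            host="redis.example.com",        # placeholder host
            port=6380,
            ssl=True,
            ssl_cert_reqs="required",
            ssl_ca_certs="/path/to/ca.pem",  # placeholder CA bundle
            ssl_check_hostname=False,        # opt out of the new default
        )
        print(await r.ping())
        await r.aclose()

    asyncio.run(main())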
@@ -1087,7 +1130,12 @@ class ConnectionPool:
             or len(self._in_use_connections) < self.max_connections
         )

-    async def get_connection(self, command_name, *keys, **options):
+    @deprecated_args(
+        args_to_warn=["*"],
+        reason="Use get_connection() without args instead",
+        version="5.3.0",
+    )
+    async def get_connection(self, command_name=None, *keys, **options):
         async with self._lock:
             """Get a connected connection from the pool"""
             connection = self.get_available_connection()
@@ -1133,7 +1181,7 @@
         try:
             if await connection.can_read_destructive():
                 raise ConnectionError("Connection has data") from None
-        except (ConnectionError, OSError):
+        except (ConnectionError, TimeoutError, OSError):
             await connection.disconnect()
             await connection.connect()
             if await connection.can_read_destructive():
@@ -1255,7 +1303,12 @@ class BlockingConnectionPool(ConnectionPool):
         self._condition = asyncio.Condition()
         self.timeout = timeout

-    async def get_connection(self, command_name, *keys, **options):
+    @deprecated_args(
+        args_to_warn=["*"],
+        reason="Use get_connection() without args instead",
+        version="5.3.0",
+    )
+    async def get_connection(self, command_name=None, *keys, **options):
         """Gets a connection from the pool, blocking until one is available"""
         try:
             async with self._condition:
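
The deprecated_args decorator above means that passing a command name (or keys) to get_connection() now triggers a DeprecationWarning; the method should be called with no arguments. A brief sketch of the new call pattern against the asyncio pool, with placeholder connection details:

    import asyncio
    from redis.asyncio import ConnectionPool

    async def main():
        pool = ConnectionPool(host="localhost", port=6379)  # placeholder address
        conn = await pool.get_connection()  # was: pool.get_connection("PING")
        try:
            await conn.send_command("PING")
            print(await conn.read_response())
        finally:
            await pool.release(conn)
            await pool.disconnect()

    asyncio.run(main())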
redis/asyncio/lock.py CHANGED
@@ -1,14 +1,18 @@
 import asyncio
+import logging
 import threading
 import uuid
 from types import SimpleNamespace
 from typing import TYPE_CHECKING, Awaitable, Optional, Union

 from redis.exceptions import LockError, LockNotOwnedError
+from redis.typing import Number

 if TYPE_CHECKING:
     from redis.asyncio import Redis, RedisCluster

+logger = logging.getLogger(__name__)
+

 class Lock:
     """
@@ -82,8 +86,9 @@ class Lock:
         timeout: Optional[float] = None,
         sleep: float = 0.1,
         blocking: bool = True,
-        blocking_timeout: Optional[float] = None,
+        blocking_timeout: Optional[Number] = None,
         thread_local: bool = True,
+        raise_on_release_error: bool = True,
     ):
         """
         Create a new Lock instance named ``name`` using the Redis client
@@ -127,6 +132,11 @@
         thread-1 would see the token value as "xyz" and would be
         able to successfully release the thread-2's lock.

+        ``raise_on_release_error`` indicates whether to raise an exception when
+        the lock is no longer owned when exiting the context manager. By default,
+        this is True, meaning an exception will be raised. If False, the warning
+        will be logged and the exception will be suppressed.
+
         In some use cases it's necessary to disable thread local storage. For
         example, if you have code where one thread acquires a lock and passes
         that lock instance to a worker thread to release later. If thread
@@ -143,6 +153,7 @@
         self.blocking_timeout = blocking_timeout
         self.thread_local = bool(thread_local)
         self.local = threading.local() if self.thread_local else SimpleNamespace()
+        self.raise_on_release_error = raise_on_release_error
         self.local.token = None
         self.register_scripts()

@@ -162,12 +173,19 @@
             raise LockError("Unable to acquire lock within the time specified")

     async def __aexit__(self, exc_type, exc_value, traceback):
-        await self.release()
+        try:
+            await self.release()
+        except LockError:
+            if self.raise_on_release_error:
+                raise
+            logger.warning(
+                "Lock was unlocked or no longer owned when exiting context manager."
+            )

     async def acquire(
         self,
         blocking: Optional[bool] = None,
-        blocking_timeout: Optional[float] = None,
+        blocking_timeout: Optional[Number] = None,
         token: Optional[Union[str, bytes]] = None,
     ):
         """
@@ -249,7 +267,10 @@
         """Releases the already acquired lock"""
         expected_token = self.local.token
         if expected_token is None:
-            raise LockError("Cannot release an unlocked lock")
+            raise LockError(
+                "Cannot release a lock that's not owned or is already unlocked.",
+                lock_name=self.name,
+            )
         self.local.token = None
         return self.do_release(expected_token)

@@ -262,7 +283,7 @@
             raise LockNotOwnedError("Cannot release a lock that's no longer owned")

     def extend(
-        self, additional_time: float, replace_ttl: bool = False
+        self, additional_time: Number, replace_ttl: bool = False
     ) -> Awaitable[bool]:
         """
         Adds more time to an already acquired lock.
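
The new raise_on_release_error flag controls what happens when the lock has expired or changed hands by the time the async context manager exits: with the default True the LockError still propagates, while False logs a warning instead. An illustrative sketch; the key name and timeouts are arbitrary and a local Redis server is assumed:

    import asyncio
    from redis.asyncio import Redis
    from redis.asyncio.lock import Lock

    async def main():
        client = Redis()  # assumes a local Redis server
        lock = Lock(client, "resource-lock", timeout=1, raise_on_release_error=False)
        async with lock:
            await asyncio.sleep(2)  # the 1-second lock expires while still held
        # exiting the block logs a warning instead of raising LockNotOwnedError
        await client.aclose()

    asyncio.run(main())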
redis/asyncio/retry.py CHANGED
@@ -43,6 +43,18 @@ class Retry:
             set(self._supported_errors + tuple(specified_errors))
         )

+    def get_retries(self) -> int:
+        """
+        Get the number of retries.
+        """
+        return self._retries
+
+    def update_retries(self, value: int) -> None:
+        """
+        Set the number of retries.
+        """
+        self._retries = value
+
     async def call_with_retry(
         self, do: Callable[[], Awaitable[T]], fail: Callable[[RedisError], Any]
     ) -> T:
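
get_retries() and update_retries() expose the retry budget of an existing Retry object, which previously required reaching into the private _retries attribute. A short sketch; the backoff policy and counts are arbitrary:

    from redis.asyncio.retry import Retry
    from redis.backoff import ExponentialBackoff

    retry = Retry(ExponentialBackoff(), retries=3)
    print(retry.get_retries())   # 3
    retry.update_retries(5)      # adjust the budget without rebuilding the object
    print(retry.get_retries())   # 5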
redis/asyncio/sentinel.py CHANGED
@@ -198,6 +198,7 @@ class Sentinel(AsyncSentinelCommands):
         sentinels,
         min_other_sentinels=0,
         sentinel_kwargs=None,
+        force_master_ip=None,
         **connection_kwargs,
     ):
         # if sentinel_kwargs isn't defined, use the socket_* options from
@@ -214,6 +215,7 @@
         ]
         self.min_other_sentinels = min_other_sentinels
         self.connection_kwargs = connection_kwargs
+        self._force_master_ip = force_master_ip

     async def execute_command(self, *args, **kwargs):
         """
@@ -277,7 +279,13 @@
                     sentinel,
                     self.sentinels[0],
                 )
-                return state["ip"], state["port"]
+
+                ip = (
+                    self._force_master_ip
+                    if self._force_master_ip is not None
+                    else state["ip"]
+                )
+                return ip, state["port"]

         error_info = ""
         if len(collected_errors) > 0:
@@ -318,6 +326,8 @@
     ):
         """
         Returns a redis client instance for the ``service_name`` master.
+        Sentinel client will detect failover and reconnect Redis clients
+        automatically.

         A :py:class:`~redis.sentinel.SentinelConnectionPool` class is
         used to retrieve the master's address before establishing a new
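
force_master_ip makes discover_master() return a fixed address instead of the IP announced by Sentinel, which helps when Sentinel advertises an address that is not routable from the client (NAT, Docker, and similar setups). A hedged sketch with placeholder addresses:

    import asyncio
    from redis.asyncio.sentinel import Sentinel

    async def main():
        sentinel = Sentinel(
            [("sentinel-1.internal", 26379)],  # placeholder Sentinel address
            force_master_ip="203.0.113.10",    # address reachable from the client
            socket_timeout=0.5,
        )
        host, port = await sentinel.discover_master("mymaster")
        print(host, port)  # 203.0.113.10 plus the port reported by Sentinel

    asyncio.run(main())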
redis/asyncio/utils.py CHANGED
@@ -16,7 +16,7 @@ def from_url(url, **kwargs):
     return Redis.from_url(url, **kwargs)


-class pipeline:
+class pipeline:  # noqa: N801
     def __init__(self, redis_obj: "Redis"):
         self.p: "Pipeline" = redis_obj.pipeline()

redis/auth/token.py CHANGED
@@ -1,7 +1,6 @@
 from abc import ABC, abstractmethod
 from datetime import datetime, timezone

-import jwt
 from redis.auth.err import InvalidTokenSchemaErr


@@ -77,10 +76,15 @@ class SimpleToken(TokenInterface):


 class JWToken(TokenInterface):
-
     REQUIRED_FIELDS = {"exp"}

     def __init__(self, token: str):
+        try:
+            import jwt
+        except ImportError as ie:
+            raise ImportError(
+                f"The PyJWT library is required for {self.__class__.__name__}.",
+            ) from ie
         self._value = token
         self._decoded = jwt.decode(
             self._value,
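
Importing jwt inside JWToken.__init__ makes PyJWT an optional dependency: it is only needed when a JWT-based token is actually constructed, and a descriptive ImportError is raised otherwise. A small sketch; the secret and claims are placeholders, and PyJWT is used here only to build an example token:

    import jwt  # PyJWT, optional as of 6.0.0
    from redis.auth.token import JWToken

    encoded = jwt.encode({"exp": 2_000_000_000}, "placeholder-secret", algorithm="HS256")
    try:
        token = JWToken(encoded)
    except ImportError as err:
        # raised by 6.0.0 when PyJWT is not installed
        print(err)
    else:
        print("JWToken constructed from the encoded value")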
redis/backoff.py CHANGED
@@ -110,5 +110,20 @@ class DecorrelatedJitterBackoff(AbstractBackoff):
         return self._previous_backoff


+class ExponentialWithJitterBackoff(AbstractBackoff):
+    """Exponential backoff upon failure, with jitter"""
+
+    def __init__(self, cap: float = DEFAULT_CAP, base: float = DEFAULT_BASE) -> None:
+        """
+        `cap`: maximum backoff time in seconds
+        `base`: base backoff time in seconds
+        """
+        self._cap = cap
+        self._base = base
+
+    def compute(self, failures: int) -> float:
+        return min(self._cap, random.random() * self._base * 2**failures)
+
+
 def default_backoff():
     return EqualJitterBackoff()
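
ExponentialWithJitterBackoff waits random.random() * base * 2**failures seconds per retry, capped at cap. A short sketch wiring it into a client retry policy; the host and retry count are placeholders:

    from redis import Redis
    from redis.backoff import ExponentialWithJitterBackoff
    from redis.retry import Retry

    retry = Retry(ExponentialWithJitterBackoff(cap=10, base=1), retries=3)
    r = Redis(host="localhost", port=6379, retry=retry)  # placeholder address
    r.ping()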