limits 3.7.0__tar.gz → 3.8.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. {limits-3.7.0 → limits-3.8.0}/HISTORY.rst +11 -0
  2. {limits-3.7.0 → limits-3.8.0}/PKG-INFO +1 -7
  3. {limits-3.7.0 → limits-3.8.0}/README.rst +0 -6
  4. {limits-3.7.0 → limits-3.8.0}/doc/source/api.rst +2 -0
  5. {limits-3.7.0 → limits-3.8.0}/limits/_version.py +3 -3
  6. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/base.py +78 -2
  7. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/etcd.py +7 -1
  8. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/memcached.py +19 -3
  9. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/memory.py +11 -3
  10. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/mongodb.py +16 -4
  11. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/redis.py +24 -8
  12. {limits-3.7.0 → limits-3.8.0}/limits/errors.py +9 -0
  13. {limits-3.7.0 → limits-3.8.0}/limits/limits.py +1 -0
  14. {limits-3.7.0 → limits-3.8.0}/limits/storage/base.py +77 -2
  15. {limits-3.7.0 → limits-3.8.0}/limits/storage/etcd.py +7 -1
  16. {limits-3.7.0 → limits-3.8.0}/limits/storage/memcached.py +31 -4
  17. {limits-3.7.0 → limits-3.8.0}/limits/storage/memory.py +11 -3
  18. {limits-3.7.0 → limits-3.8.0}/limits/storage/mongodb.py +20 -5
  19. {limits-3.7.0 → limits-3.8.0}/limits/storage/redis.py +18 -7
  20. {limits-3.7.0 → limits-3.8.0}/limits/storage/redis_cluster.py +1 -1
  21. {limits-3.7.0 → limits-3.8.0}/limits/storage/redis_sentinel.py +8 -3
  22. {limits-3.7.0 → limits-3.8.0}/limits/typing.py +2 -0
  23. {limits-3.7.0 → limits-3.8.0}/limits/util.py +1 -0
  24. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/PKG-INFO +1 -7
  25. limits-3.8.0/pyproject.toml +10 -0
  26. {limits-3.7.0 → limits-3.8.0}/requirements/dev.txt +0 -2
  27. {limits-3.7.0 → limits-3.8.0}/requirements/docs.txt +2 -2
  28. {limits-3.7.0 → limits-3.8.0}/requirements/test.txt +2 -2
  29. {limits-3.7.0 → limits-3.8.0}/setup.py +4 -1
  30. {limits-3.7.0 → limits-3.8.0}/tests/test_storage.py +101 -1
  31. {limits-3.7.0 → limits-3.8.0}/versioneer.py +161 -112
  32. limits-3.7.0/pyproject.toml +0 -3
  33. {limits-3.7.0 → limits-3.8.0}/CLASSIFIERS +0 -0
  34. {limits-3.7.0 → limits-3.8.0}/CONTRIBUTIONS.rst +0 -0
  35. {limits-3.7.0 → limits-3.8.0}/LICENSE.txt +0 -0
  36. {limits-3.7.0 → limits-3.8.0}/MANIFEST.in +0 -0
  37. {limits-3.7.0 → limits-3.8.0}/doc/Makefile +0 -0
  38. {limits-3.7.0 → limits-3.8.0}/doc/source/_static/custom.css +0 -0
  39. {limits-3.7.0 → limits-3.8.0}/doc/source/async.rst +0 -0
  40. {limits-3.7.0 → limits-3.8.0}/doc/source/changelog.rst +0 -0
  41. {limits-3.7.0 → limits-3.8.0}/doc/source/conf.py +2 -2
  42. {limits-3.7.0 → limits-3.8.0}/doc/source/custom-storage.rst +0 -0
  43. {limits-3.7.0 → limits-3.8.0}/doc/source/index.rst +0 -0
  44. {limits-3.7.0 → limits-3.8.0}/doc/source/installation.rst +0 -0
  45. {limits-3.7.0 → limits-3.8.0}/doc/source/quickstart.rst +0 -0
  46. {limits-3.7.0 → limits-3.8.0}/doc/source/storage.rst +0 -0
  47. {limits-3.7.0 → limits-3.8.0}/doc/source/strategies.rst +0 -0
  48. {limits-3.7.0 → limits-3.8.0}/doc/source/theme_config.py +0 -0
  49. {limits-3.7.0 → limits-3.8.0}/limits/__init__.py +0 -0
  50. {limits-3.7.0 → limits-3.8.0}/limits/aio/__init__.py +0 -0
  51. {limits-3.7.0 → limits-3.8.0}/limits/aio/storage/__init__.py +0 -0
  52. {limits-3.7.0 → limits-3.8.0}/limits/aio/strategies.py +0 -0
  53. {limits-3.7.0 → limits-3.8.0}/limits/py.typed +0 -0
  54. {limits-3.7.0 → limits-3.8.0}/limits/resources/redis/lua_scripts/acquire_moving_window.lua +0 -0
  55. {limits-3.7.0 → limits-3.8.0}/limits/resources/redis/lua_scripts/clear_keys.lua +0 -0
  56. {limits-3.7.0 → limits-3.8.0}/limits/resources/redis/lua_scripts/incr_expire.lua +0 -0
  57. {limits-3.7.0 → limits-3.8.0}/limits/resources/redis/lua_scripts/moving_window.lua +0 -0
  58. {limits-3.7.0 → limits-3.8.0}/limits/storage/__init__.py +0 -0
  59. {limits-3.7.0 → limits-3.8.0}/limits/storage/registry.py +0 -0
  60. {limits-3.7.0 → limits-3.8.0}/limits/strategies.py +0 -0
  61. {limits-3.7.0 → limits-3.8.0}/limits/version.py +0 -0
  62. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/SOURCES.txt +0 -0
  63. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/dependency_links.txt +0 -0
  64. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/not-zip-safe +0 -0
  65. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/requires.txt +0 -0
  66. {limits-3.7.0 → limits-3.8.0}/limits.egg-info/top_level.txt +0 -0
  67. {limits-3.7.0 → limits-3.8.0}/requirements/ci.txt +0 -0
  68. {limits-3.7.0 → limits-3.8.0}/requirements/main.txt +0 -0
  69. {limits-3.7.0 → limits-3.8.0}/requirements/storage/async-etcd.txt +0 -0
  70. {limits-3.7.0 → limits-3.8.0}/requirements/storage/async-memcached.txt +0 -0
  71. {limits-3.7.0 → limits-3.8.0}/requirements/storage/async-mongodb.txt +0 -0
  72. {limits-3.7.0 → limits-3.8.0}/requirements/storage/async-redis.txt +0 -0
  73. {limits-3.7.0 → limits-3.8.0}/requirements/storage/etcd.txt +0 -0
  74. {limits-3.7.0 → limits-3.8.0}/requirements/storage/memcached.txt +0 -0
  75. {limits-3.7.0 → limits-3.8.0}/requirements/storage/mongodb.txt +0 -0
  76. {limits-3.7.0 → limits-3.8.0}/requirements/storage/redis.txt +0 -0
  77. {limits-3.7.0 → limits-3.8.0}/requirements/storage/rediscluster.txt +0 -0
  78. {limits-3.7.0 → limits-3.8.0}/setup.cfg +0 -0
  79. {limits-3.7.0 → limits-3.8.0}/tests/test_limit_granularities.py +0 -0
  80. {limits-3.7.0 → limits-3.8.0}/tests/test_limits.py +0 -0
  81. {limits-3.7.0 → limits-3.8.0}/tests/test_ratelimit_parser.py +0 -0
  82. {limits-3.7.0 → limits-3.8.0}/tests/test_strategy.py +0 -0
  83. {limits-3.7.0 → limits-3.8.0}/tests/test_utils.py +0 -0
--- limits-3.7.0/HISTORY.rst
+++ limits-3.8.0/HISTORY.rst
@@ -3,6 +3,16 @@
 Changelog
 =========
 
+v3.8.0
+------
+Release Date: 2024-02-14
+
+* Features
+
+  * Add option to wrap storage errors with a ``StorageError``
+    exception
+
+
 v3.7.0
 ------
 Release Date: 2023-11-24
@@ -638,5 +648,6 @@ Release Date: 2015-01-08
 
 
 
+
 
 
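Note: the error wrapping described in this changelog entry is opt-in per storage instance. A minimal usage sketch follows (the Redis URI, port, and key are illustrative, and it assumes the synchronous Redis backend forwards the new wrap_exceptions keyword, as the changes further down indicate):

from limits import parse
from limits.errors import StorageError
from limits.storage import storage_from_string
from limits.strategies import FixedWindowRateLimiter

# Opting in converts backend-specific errors (e.g. connection failures)
# into limits.errors.StorageError instead of leaking redis/pymongo/etc. types.
storage = storage_from_string("redis://localhost:7379", wrap_exceptions=True)
limiter = FixedWindowRateLimiter(storage)

try:
    limiter.hit(parse("10/minute"), "illustrative-key")
except StorageError as exc:
    # The original backend exception stays available for inspection.
    print("storage backend failed:", repr(exc.storage_error))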
--- limits-3.7.0/PKG-INFO
+++ limits-3.8.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: limits
-Version: 3.7.0
+Version: 3.8.0
 Summary: Rate limiting utilities
 Home-page: https://limits.readthedocs.org
 Author: Ali-Akber Saifee
@@ -76,12 +76,6 @@ limits
 
 **limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached, MongoDB & Etcd).
 
-----
-
-Sponsored by Zuplo - fully-managed, programmable API Management platform.
-Add rate limiting and more to your public API in minutes, try it at `zuplo.com <https://zuplo.link/3DZM9Ej>`_
-
-----
 
 Supported Strategies
 ====================
--- limits-3.7.0/README.rst
+++ limits-3.8.0/README.rst
@@ -18,12 +18,6 @@ limits
 
 **limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached, MongoDB & Etcd).
 
-----
-
-Sponsored by Zuplo - fully-managed, programmable API Management platform.
-Add rate limiting and more to your public API in minutes, try it at `zuplo.com <https://zuplo.link/3DZM9Ej>`_
-
-----
 
 Supported Strategies
 ====================
--- limits-3.7.0/doc/source/api.rst
+++ limits-3.8.0/doc/source/api.rst
@@ -203,3 +203,5 @@ Exceptions
    :no-inherited-members:
 .. autoexception:: limits.errors.ConcurrentUpdateError
    :no-inherited-members:
+.. autoexception:: limits.errors.StorageError
+   :no-inherited-members:
--- limits-3.7.0/limits/_version.py
+++ limits-3.8.0/limits/_version.py
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2023-11-24T14:02:38-0800",
+ "date": "2024-02-14T15:45:28-0800",
  "dirty": false,
  "error": null,
- "full-revisionid": "0946fdcd3b9509041a823d3bb1b63ea1de9ab6ee",
- "version": "3.7.0"
+ "full-revisionid": "f1fe9f3efffef2f4c5975a93a069a8b759d3a240",
+ "version": "3.8.0"
 }
 '''  # END VERSION_JSON
 
--- limits-3.7.0/limits/aio/storage/base.py
+++ limits-3.8.0/limits/aio/storage/base.py
@@ -1,12 +1,43 @@
+from __future__ import annotations
+
+import functools
 from abc import ABC, abstractmethod
+from typing import Any, cast
 
 from deprecated.sphinx import versionadded
 
+from limits import errors
 from limits.storage.registry import StorageRegistry
-from limits.typing import List, Optional, Tuple, Union
+from limits.typing import (
+    Awaitable,
+    Callable,
+    List,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import LazyDependency
 
 
+def _wrap_errors(
+    storage: Storage,
+    fn: Callable[P, Awaitable[R]],
+) -> Callable[P, Awaitable[R]]:
+    @functools.wraps(fn)
+    async def inner(*args: P.args, **kwargs: P.kwargs) -> R:
+        try:
+            return await fn(*args, **kwargs)
+        except storage.base_exceptions as exc:
+            if storage.wrap_exceptions:
+                raise errors.StorageError(exc) from exc
+            raise
+
+    return inner
+
+
 @versionadded(version="2.1")
 class Storage(LazyDependency, metaclass=StorageRegistry):
     """
@@ -16,10 +47,38 @@ class Storage(LazyDependency, metaclass=StorageRegistry):
     STORAGE_SCHEME: Optional[List[str]]
     """The storage schemes to register against this implementation"""
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> Storage:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "incr",
+            "get",
+            "get_expiry",
+            "check",
+            "reset",
+            "clear",
+        }:
+            setattr(inst, method, _wrap_errors(inst, getattr(inst, method)))
+
+        return inst
+
     def __init__(
-        self, uri: Optional[str] = None, **options: Union[float, str, bool]
+        self,
+        uri: Optional[str] = None,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
     ) -> None:
+        """
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        """
         super().__init__()
+        self.wrap_exceptions = wrap_exceptions
+
+    @property
+    @abstractmethod
+    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
+        raise NotImplementedError
 
     @abstractmethod
     async def incr(
@@ -80,6 +139,22 @@ class MovingWindowSupport(ABC):
     the moving window strategy
     """
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> MovingWindowSupport:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "acquire_entry",
+            "get_moving_window",
+        }:
+            setattr(
+                inst,
+                method,
+                _wrap_errors(cast(Storage, inst), getattr(inst, method)),
+            )
+
+        return inst
+
+    @abstractmethod
     async def acquire_entry(
         self, key: str, limit: int, expiry: int, amount: int = 1
     ) -> bool:
@@ -91,6 +166,7 @@ class MovingWindowSupport(ABC):
         """
         raise NotImplementedError
 
+    @abstractmethod
     async def get_moving_window(
         self, key: str, limit: int, expiry: int
     ) -> Tuple[int, int]:
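Note: the __new__ hook above rebinds the instance's public coroutines with _wrap_errors at construction time, so subclasses inherit the wrapping without decorating each override. A standalone sketch of the same per-instance wrapping pattern (every name below is illustrative, not library API):

import asyncio
import functools


class WrappedError(Exception):
    def __init__(self, original: Exception) -> None:
        self.original = original


class FlakyClient:
    base_exceptions = (ConnectionError, TimeoutError)
    wrap_exceptions = True

    def __new__(cls, *args, **kwargs):
        inst = super().__new__(cls)
        # Rebind selected coroutine methods on the instance so overrides
        # in subclasses are wrapped without repeating the decorator.
        for name in {"ping"}:
            fn = getattr(inst, name)

            @functools.wraps(fn)
            async def inner(*a, _fn=fn, **kw):
                try:
                    return await _fn(*a, **kw)
                except inst.base_exceptions as exc:
                    if inst.wrap_exceptions:
                        raise WrappedError(exc) from exc
                    raise

            setattr(inst, name, inner)
        return inst

    async def ping(self) -> None:
        raise ConnectionError("backend unreachable")


async def main() -> None:
    try:
        await FlakyClient().ping()
    except WrappedError as exc:
        print(type(exc.original).__name__)  # prints: ConnectionError


asyncio.run(main())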
--- limits-3.7.0/limits/aio/storage/etcd.py
+++ limits-3.8.0/limits/aio/storage/etcd.py
@@ -1,7 +1,7 @@
 import asyncio
 import time
 import urllib.parse
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Tuple, Type, Union
 
 from limits.aio.storage.base import Storage
 from limits.errors import ConcurrentUpdateError
@@ -46,6 +46,12 @@ class EtcdStorage(Storage):
         )
         self.max_retries = max_retries
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib.ClientError  # type: ignore[no-any-return]
+
     def prefixed_key(self, key: str) -> bytes:
         return f"{self.PREFIX}/{key}".encode()
 
--- limits-3.7.0/limits/aio/storage/memcached.py
+++ limits-3.8.0/limits/aio/storage/memcached.py
@@ -4,7 +4,7 @@ import urllib.parse
 from deprecated.sphinx import versionadded
 
 from limits.aio.storage.base import Storage
-from limits.typing import EmcacheClientP, Optional, Union
+from limits.typing import EmcacheClientP, Optional, Tuple, Type, Union
 
 
 @versionadded(version="2.1")
@@ -20,10 +20,17 @@ class MemcachedStorage(Storage):
 
     DEPENDENCIES = ["emcache"]
 
-    def __init__(self, uri: str, **options: Union[float, str, bool]) -> None:
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: memcached location of the form
          ``async+memcached://host:port,host:port``
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
          directly to the constructor of :class:`emcache.Client`
         :raise ConfigurationError: when :pypi:`emcache` is not available
@@ -38,9 +45,18 @@ class MemcachedStorage(Storage):
 
         self._options = options
         self._storage = None
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
         self.dependency = self.dependencies["emcache"].module
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return (
+            self.dependency.ClusterNoAvailableNodes,
+            self.dependency.CommandError,
+        )
+
     async def get_storage(self) -> EmcacheClientP:
         if not self._storage:
             self._storage = await self.dependency.create_client(
--- limits-3.7.0/limits/aio/storage/memory.py
+++ limits-3.8.0/limits/aio/storage/memory.py
@@ -6,7 +6,7 @@ from deprecated.sphinx import versionadded
 
 import limits.typing
 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, List, Optional, Tuple
+from limits.typing import Dict, List, Optional, Tuple, Type, Union
 
 
 class LockableEntry(asyncio.Lock):
@@ -30,12 +30,20 @@ class MemoryStorage(Storage, MovingWindowSupport):
     async context
     """
 
-    def __init__(self, uri: Optional[str] = None, **_: str) -> None:
+    def __init__(
+        self, uri: Optional[str] = None, wrap_exceptions: bool = False, **_: str
+    ) -> None:
         self.storage: limits.typing.Counter[str] = Counter()
         self.expirations: Dict[str, float] = {}
         self.events: Dict[str, List[LockableEntry]] = {}
         self.timer: Optional[asyncio.Task[None]] = None
-        super().__init__(uri, **_)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **_)
+
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return ValueError
 
     async def __expire_events(self) -> None:
         for key in self.events.keys():
--- limits-3.7.0/limits/aio/storage/mongodb.py
+++ limits-3.8.0/limits/aio/storage/mongodb.py
@@ -9,7 +9,8 @@ from typing import Any, cast
 
 from deprecated.sphinx import versionadded
 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, Optional, ParamSpec, Tuple, TypeVar, Union
+from limits.typing import Dict, Optional, ParamSpec, Tuple, Type, TypeVar, Union
+from limits.util import get_dependency
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -40,6 +41,7 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self,
         uri: str,
         database_name: str = "limits",
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """
@@ -47,6 +49,8 @@ class MongoDBStorage(Storage, MovingWindowSupport):
          This uri is passed directly to :class:`~motor.motor_asyncio.AsyncIOMotorClient`
         :param database_name: The database to use for storing the rate limit
          collections.
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are merged with
          :data:`DEFAULT_OPTIONS` and passed to the constructor of
          :class:`~motor.motor_asyncio.AsyncIOMotorClient`
@@ -58,10 +62,11 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()]
         uri = uri.replace("async+mongodb", "mongodb", 1)
 
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
 
         self.dependency = self.dependencies["motor.motor_asyncio"]
         self.proxy_dependency = self.dependencies["pymongo"]
+        self.lib_errors, _ = get_dependency("pymongo.errors")
 
         self.storage = self.dependency.module.AsyncIOMotorClient(uri, **mongo_opts)
         # TODO: Fix this hack. It was noticed when running a benchmark
@@ -72,6 +77,12 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.__database_name = database_name
         self.__indices_created = False
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib_errors.PyMongoError  # type: ignore
+
     @property
     def database(self):  # type: ignore
         return self.storage.get_database(self.__database_name)
@@ -257,8 +268,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         await self.database.windows.update_one(
             {
                 "_id": key,
-                "entries.%d"
-                % (limit - amount): {"$not": {"$gte": timestamp - expiry}},
+                "entries.%d" % (limit - amount): {
+                    "$not": {"$gte": timestamp - expiry}
+                },
             },
             updates,
             upsert=True,
--- limits-3.7.0/limits/aio/storage/redis.py
+++ limits-3.8.0/limits/aio/storage/redis.py
@@ -7,7 +7,7 @@ from packaging.version import Version
 
 from limits.aio.storage.base import MovingWindowSupport, Storage
 from limits.errors import ConfigurationError
-from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Union
+from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Type, Union
 from limits.util import get_package_data
 
 if TYPE_CHECKING:
@@ -159,6 +159,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         self,
         uri: str,
         connection_pool: Optional["coredis.ConnectionPool"] = None,
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """
@@ -167,13 +168,15 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
          - ``async+redis://[:password]@host:port``
          - ``async+redis://[:password]@host:port/db``
          - ``async+rediss://[:password]@host:port``
-         - ``async+unix:///path/to/sock`` etc...
+         - ``async+redis+unix:///path/to/sock?db=0`` etc...
 
         This uri is passed directly to :meth:`coredis.Redis.from_url` with
         the initial ``async`` removed, except for the case of ``async+redis+unix``
         where it is replaced with ``unix``.
        :param connection_pool: if provided, the redis client is initialized with
         the connection pool and any other params passed as :paramref:`options`
+       :param wrap_exceptions: Whether to wrap storage exceptions in
+        :exc:`limits.errors.StorageError` before raising it.
        :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`coredis.Redis`
        :raise ConfigurationError: when the redis library is not available
@@ -181,7 +184,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         uri = uri.replace("async+redis", "redis", 1)
         uri = uri.replace("redis+unix", "unix")
 
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
 
         self.dependency = self.dependencies["coredis"].module
 
@@ -194,6 +197,12 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
 
 
         self.initialize_storage(uri)
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependency.RedisError  # type: ignore[no-any-return]
+
     def initialize_storage(self, _uri: str) -> None:
         # all these methods are coroutines, so must be called with await
         self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW)
@@ -268,8 +277,8 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
 
     async def reset(self) -> Optional[int]:
         """
-        This function calls a Lua Script to delete keys prefixed with `self.PREFIX`
-        in block of 5000.
+        This function calls a Lua Script to delete keys prefixed with
+        ``self.PREFIX`` in blocks of 5000.
 
         .. warning:: This operation was designed to be fast, but was not tested
          on a large production based system. Be careful with its usage as it
@@ -298,7 +307,12 @@ class RedisClusterStorage(RedisStorage):
     }
     "Default options passed to :class:`coredis.RedisCluster`"
 
-    def __init__(self, uri: str, **options: Union[float, str, bool]) -> None:
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: url of the form
          ``async+redis+cluster://[:password]@host:port,host:port``
@@ -322,7 +336,9 @@ class RedisClusterStorage(RedisStorage):
             host, port = loc.split(":")
             cluster_hosts.append({"host": host, "port": int(port)})
 
-        super(RedisStorage, self).__init__(uri, **options)
+        super(RedisStorage, self).__init__(
+            uri, wrap_exceptions=wrap_exceptions, **options
+        )
 
         self.dependency = self.dependencies["coredis"].module
 
@@ -336,7 +352,7 @@ class RedisClusterStorage(RedisStorage):
         """
         Redis Clusters are sharded and deleting across shards
         can't be done atomically. Because of this, this reset loops over all
-        keys that are prefixed with `self.PREFIX` and calls delete on them,
+        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
         one at a time.
 
         .. warning:: This operation was not tested with extremely large data sets.
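Note: combined with the async strategies, the changes above can be exercised roughly as follows; a hedged sketch assuming coredis is installed, with the host, port, and rate limit string purely illustrative:

import asyncio

from limits import parse
from limits.aio.storage import RedisStorage
from limits.aio.strategies import MovingWindowRateLimiter
from limits.errors import StorageError


async def main() -> None:
    # async+redis scheme per the docstring above; wrap_exceptions opts in to
    # StorageError instead of the underlying coredis.RedisError hierarchy.
    storage = RedisStorage("async+redis://localhost:7379", wrap_exceptions=True)
    limiter = MovingWindowRateLimiter(storage)
    try:
        allowed = await limiter.hit(parse("5/second"), "illustrative-key")
        print("allowed:", allowed)
    except StorageError as exc:
        print("redis backend failed:", repr(exc.storage_error))


asyncio.run(main())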
--- limits-3.7.0/limits/errors.py
+++ limits-3.8.0/limits/errors.py
@@ -17,3 +17,12 @@ class ConcurrentUpdateError(Exception):
 
     def __init__(self, key: str, attempts: int) -> None:
         super().__init__(f"Unable to update {key} after {attempts} retries")
+
+
+class StorageError(Exception):
+    """
+    Error raised when an error is encountered in a storage
+    """
+
+    def __init__(self, storage_error: Exception) -> None:
+        self.storage_error = storage_error
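Note: wrap_exceptions defaults to False, so backend exceptions still propagate unchanged unless the caller opts in; when enabled, the original exception is preserved on storage_error, which keeps backend-specific handling possible. A small hedged sketch (the fail-open policy is illustrative):

from limits.errors import StorageError


def record_hit(limiter, item, key) -> bool:
    """Treat a broken storage backend as not rate limited (fail-open)."""
    try:
        return limiter.hit(item, key)
    except StorageError as exc:
        # exc.storage_error is the untouched backend exception
        # (e.g. a redis or pymongo error), useful for logging or metrics.
        print("rate limit backend error:", type(exc.storage_error).__name__)
        return True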
--- limits-3.7.0/limits/limits.py
+++ limits-3.8.0/limits/limits.py
@@ -1,6 +1,7 @@
 """
 
 """
+
 from __future__ import annotations
 
 from functools import total_ordering
--- limits-3.7.0/limits/storage/base.py
+++ limits-3.8.0/limits/storage/base.py
@@ -1,11 +1,38 @@
+from __future__ import annotations
+
+import functools
 import threading
 from abc import ABC, abstractmethod
+from typing import Any, cast
 
+from limits import errors
 from limits.storage.registry import StorageRegistry
-from limits.typing import List, Optional, Tuple, Union
+from limits.typing import (
+    Callable,
+    List,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import LazyDependency
 
 
+def _wrap_errors(storage: Storage, fn: Callable[P, R]) -> Callable[P, R]:
+    @functools.wraps(fn)
+    def inner(*args: P.args, **kwargs: P.kwargs) -> R:
+        try:
+            return fn(*args, **kwargs)
+        except storage.base_exceptions as exc:
+            if storage.wrap_exceptions:
+                raise errors.StorageError(exc) from exc
+            raise
+
+    return inner
+
+
 class Storage(LazyDependency, metaclass=StorageRegistry):
     """
     Base class to extend when implementing a storage backend.
@@ -14,9 +41,40 @@ class Storage(LazyDependency, metaclass=StorageRegistry):
     STORAGE_SCHEME: Optional[List[str]]
     """The storage schemes to register against this implementation"""
 
-    def __init__(self, uri: Optional[str] = None, **options: Union[float, str, bool]):
+    def __new__(cls, *args: Any, **kwargs: Any) -> Storage:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "incr",
+            "get",
+            "get_expiry",
+            "check",
+            "reset",
+            "clear",
+        }:
+            setattr(inst, method, _wrap_errors(inst, getattr(inst, method)))
+
+        return inst
+
+    def __init__(
+        self,
+        uri: Optional[str] = None,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ):
+        """
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        """
+
         self.lock = threading.RLock()
         super().__init__()
+        self.wrap_exceptions = wrap_exceptions
+
+    @property
+    @abstractmethod
+    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
+        raise NotImplementedError
 
     @abstractmethod
     def incr(
@@ -77,6 +135,22 @@ class MovingWindowSupport(ABC):
     the moving window strategy
     """
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> MovingWindowSupport:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "acquire_entry",
+            "get_moving_window",
+        }:
+            setattr(
+                inst,
+                method,
+                _wrap_errors(cast(Storage, inst), getattr(inst, method)),
+            )
+
+        return inst
+
+    @abstractmethod
     def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool:
         """
         :param key: rate limit key to acquire an entry in
@@ -86,6 +160,7 @@ class MovingWindowSupport(ABC):
         """
         raise NotImplementedError
 
+    @abstractmethod
     def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]:
         """
         returns the starting point and the number of entries in the moving
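Note: base_exceptions is now an abstract property on the synchronous Storage base class, so custom backends must declare which exception types count as storage failures. A hedged sketch of a minimal in-process backend written against this contract (the scheme name, class, and method bodies are illustrative, not part of the package):

import time
from typing import Optional, Tuple, Type, Union

from limits.storage.base import Storage


class DictStorage(Storage):
    """Toy dictionary-backed storage, only to illustrate the 3.8.0 contract."""

    STORAGE_SCHEME = ["dictstorage"]

    def __init__(self, uri: Optional[str] = None, wrap_exceptions: bool = False, **options):
        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
        self._counters: dict = {}
        self._expirations: dict = {}

    @property
    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
        # Any of these escaping the methods below is re-raised as
        # limits.errors.StorageError when wrap_exceptions=True.
        return (KeyError, ValueError)

    def incr(self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1) -> int:
        self._expirations.setdefault(key, time.time() + expiry)
        self._counters[key] = self._counters.get(key, 0) + amount
        return self._counters[key]

    def get(self, key: str) -> int:
        return self._counters.get(key, 0)

    def get_expiry(self, key: str) -> int:
        return int(self._expirations.get(key, time.time()))

    def check(self) -> bool:
        return True

    def reset(self) -> Optional[int]:
        count = len(self._counters)
        self._counters.clear()
        self._expirations.clear()
        return count

    def clear(self, key: str) -> None:
        self._counters.pop(key, None)
        self._expirations.pop(key, None)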
--- limits-3.7.0/limits/storage/etcd.py
+++ limits-3.8.0/limits/storage/etcd.py
@@ -1,6 +1,6 @@
 import time
 import urllib.parse
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Tuple, Type, Union
 
 from limits.errors import ConcurrentUpdateError
 from limits.storage.base import Storage
@@ -44,6 +44,12 @@ class EtcdStorage(Storage):
         )
         self.max_retries = max_retries
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib.Etcd3Exception  # type: ignore[no-any-return]
+
     def prefixed_key(self, key: str) -> bytes:
         return f"{self.PREFIX}/{key}".encode()
 