limits 3.7.0-py3-none-any.whl → 3.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- limits/_version.py +3 -3
- limits/aio/storage/base.py +78 -2
- limits/aio/storage/etcd.py +7 -1
- limits/aio/storage/memcached.py +19 -3
- limits/aio/storage/memory.py +11 -3
- limits/aio/storage/mongodb.py +16 -4
- limits/aio/storage/redis.py +24 -8
- limits/errors.py +9 -0
- limits/limits.py +1 -0
- limits/storage/base.py +77 -2
- limits/storage/etcd.py +7 -1
- limits/storage/memcached.py +31 -4
- limits/storage/memory.py +11 -3
- limits/storage/mongodb.py +20 -5
- limits/storage/redis.py +18 -7
- limits/storage/redis_cluster.py +1 -1
- limits/storage/redis_sentinel.py +8 -3
- limits/typing.py +2 -0
- limits/util.py +1 -0
- {limits-3.7.0.dist-info → limits-3.8.0.dist-info}/METADATA +1 -7
- limits-3.8.0.dist-info/RECORD +37 -0
- {limits-3.7.0.dist-info → limits-3.8.0.dist-info}/WHEEL +1 -1
- limits-3.7.0.dist-info/RECORD +0 -37
- {limits-3.7.0.dist-info → limits-3.8.0.dist-info}/LICENSE.txt +0 -0
- {limits-3.7.0.dist-info → limits-3.8.0.dist-info}/top_level.txt +0 -0
limits/_version.py
CHANGED
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "
+ "date": "2024-02-14T15:45:28-0800",
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "3.
+ "full-revisionid": "f1fe9f3efffef2f4c5975a93a069a8b759d3a240",
+ "version": "3.8.0"
 }
 '''  # END VERSION_JSON
 
limits/aio/storage/base.py
CHANGED
@@ -1,12 +1,43 @@
+from __future__ import annotations
+
+import functools
 from abc import ABC, abstractmethod
+from typing import Any, cast
 
 from deprecated.sphinx import versionadded
 
+from limits import errors
 from limits.storage.registry import StorageRegistry
-from limits.typing import
+from limits.typing import (
+    Awaitable,
+    Callable,
+    List,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import LazyDependency
 
 
+def _wrap_errors(
+    storage: Storage,
+    fn: Callable[P, Awaitable[R]],
+) -> Callable[P, Awaitable[R]]:
+    @functools.wraps(fn)
+    async def inner(*args: P.args, **kwargs: P.kwargs) -> R:
+        try:
+            return await fn(*args, **kwargs)
+        except storage.base_exceptions as exc:
+            if storage.wrap_exceptions:
+                raise errors.StorageError(exc) from exc
+            raise
+
+    return inner
+
+
 @versionadded(version="2.1")
 class Storage(LazyDependency, metaclass=StorageRegistry):
     """
@@ -16,10 +47,38 @@ class Storage(LazyDependency, metaclass=StorageRegistry):
     STORAGE_SCHEME: Optional[List[str]]
     """The storage schemes to register against this implementation"""
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> Storage:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "incr",
+            "get",
+            "get_expiry",
+            "check",
+            "reset",
+            "clear",
+        }:
+            setattr(inst, method, _wrap_errors(inst, getattr(inst, method)))
+
+        return inst
+
     def __init__(
-        self,
+        self,
+        uri: Optional[str] = None,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
     ) -> None:
+        """
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        """
         super().__init__()
+        self.wrap_exceptions = wrap_exceptions
+
+    @property
+    @abstractmethod
+    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
+        raise NotImplementedError
 
     @abstractmethod
     async def incr(
@@ -80,6 +139,22 @@ class MovingWindowSupport(ABC):
     the moving window strategy
     """
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> MovingWindowSupport:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "acquire_entry",
+            "get_moving_window",
+        }:
+            setattr(
+                inst,
+                method,
+                _wrap_errors(cast(Storage, inst), getattr(inst, method)),
+            )
+
+        return inst
+
+    @abstractmethod
     async def acquire_entry(
         self, key: str, limit: int, expiry: int, amount: int = 1
     ) -> bool:
@@ -91,6 +166,7 @@ class MovingWindowSupport(ABC):
         """
         raise NotImplementedError
 
+    @abstractmethod
    async def get_moving_window(
        self, key: str, limit: int, expiry: int
    ) -> Tuple[int, int]:
limits/aio/storage/etcd.py
CHANGED
@@ -1,7 +1,7 @@
 import asyncio
 import time
 import urllib.parse
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Tuple, Type, Union
 
 from limits.aio.storage.base import Storage
 from limits.errors import ConcurrentUpdateError
@@ -46,6 +46,12 @@ class EtcdStorage(Storage):
         )
         self.max_retries = max_retries
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib.ClientError  # type: ignore[no-any-return]
+
     def prefixed_key(self, key: str) -> bytes:
         return f"{self.PREFIX}/{key}".encode()
 
limits/aio/storage/memcached.py
CHANGED
@@ -4,7 +4,7 @@ import urllib.parse
 from deprecated.sphinx import versionadded
 
 from limits.aio.storage.base import Storage
-from limits.typing import EmcacheClientP, Optional, Union
+from limits.typing import EmcacheClientP, Optional, Tuple, Type, Union
 
 
 @versionadded(version="2.1")
@@ -20,10 +20,17 @@ class MemcachedStorage(Storage):
 
     DEPENDENCIES = ["emcache"]
 
-    def __init__(
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: memcached location of the form
         ``async+memcached://host:port,host:port``
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`emcache.Client`
         :raise ConfigurationError: when :pypi:`emcache` is not available
@@ -38,9 +45,18 @@ class MemcachedStorage(Storage):
 
         self._options = options
         self._storage = None
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
         self.dependency = self.dependencies["emcache"].module
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return (
+            self.dependency.ClusterNoAvailableNodes,
+            self.dependency.CommandError,
+        )
+
     async def get_storage(self) -> EmcacheClientP:
         if not self._storage:
             self._storage = await self.dependency.create_client(
limits/aio/storage/memory.py
CHANGED
@@ -6,7 +6,7 @@ from deprecated.sphinx import versionadded
 
 import limits.typing
 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, List, Optional, Tuple
+from limits.typing import Dict, List, Optional, Tuple, Type, Union
 
 
 class LockableEntry(asyncio.Lock):
@@ -30,12 +30,20 @@ class MemoryStorage(Storage, MovingWindowSupport):
     async context
     """
 
-    def __init__(
+    def __init__(
+        self, uri: Optional[str] = None, wrap_exceptions: bool = False, **_: str
+    ) -> None:
         self.storage: limits.typing.Counter[str] = Counter()
         self.expirations: Dict[str, float] = {}
         self.events: Dict[str, List[LockableEntry]] = {}
         self.timer: Optional[asyncio.Task[None]] = None
-        super().__init__(uri, **_)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **_)
+
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return ValueError
 
     async def __expire_events(self) -> None:
         for key in self.events.keys():
limits/aio/storage/mongodb.py
CHANGED
@@ -9,7 +9,8 @@ from typing import Any, cast
 from deprecated.sphinx import versionadded
 
 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, Optional, ParamSpec, Tuple, TypeVar, Union
+from limits.typing import Dict, Optional, ParamSpec, Tuple, Type, TypeVar, Union
+from limits.util import get_dependency
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -40,6 +41,7 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self,
         uri: str,
         database_name: str = "limits",
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """
@@ -47,6 +49,8 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         This uri is passed directly to :class:`~motor.motor_asyncio.AsyncIOMotorClient`
         :param database_name: The database to use for storing the rate limit
         collections.
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are merged with
         :data:`DEFAULT_OPTIONS` and passed to the constructor of
         :class:`~motor.motor_asyncio.AsyncIOMotorClient`
@@ -58,10 +62,11 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()]
         uri = uri.replace("async+mongodb", "mongodb", 1)
 
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
 
         self.dependency = self.dependencies["motor.motor_asyncio"]
         self.proxy_dependency = self.dependencies["pymongo"]
+        self.lib_errors, _ = get_dependency("pymongo.errors")
 
         self.storage = self.dependency.module.AsyncIOMotorClient(uri, **mongo_opts)
         # TODO: Fix this hack. It was noticed when running a benchmark
@@ -72,6 +77,12 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.__database_name = database_name
         self.__indices_created = False
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib_errors.PyMongoError  # type: ignore
+
     @property
     def database(self):  # type: ignore
         return self.storage.get_database(self.__database_name)
@@ -257,8 +268,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         await self.database.windows.update_one(
             {
                 "_id": key,
-                "entries.%d"
-
+                "entries.%d" % (limit - amount): {
+                    "$not": {"$gte": timestamp - expiry}
+                },
             },
             updates,
             upsert=True,
limits/aio/storage/redis.py
CHANGED
@@ -7,7 +7,7 @@ from packaging.version import Version
 
 from limits.aio.storage.base import MovingWindowSupport, Storage
 from limits.errors import ConfigurationError
-from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Union
+from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Type, Union
 from limits.util import get_package_data
 
 if TYPE_CHECKING:
@@ -159,6 +159,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         self,
         uri: str,
         connection_pool: Optional["coredis.ConnectionPool"] = None,
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """
@@ -167,13 +168,15 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         - ``async+redis://[:password]@host:port``
         - ``async+redis://[:password]@host:port/db``
         - ``async+rediss://[:password]@host:port``
-        - ``async+unix:///path/to/sock`` etc...
+        - ``async+redis+unix:///path/to/sock?db=0`` etc...
 
         This uri is passed directly to :meth:`coredis.Redis.from_url` with
         the initial ``async`` removed, except for the case of ``async+redis+unix``
         where it is replaced with ``unix``.
         :param connection_pool: if provided, the redis client is initialized with
         the connection pool and any other params passed as :paramref:`options`
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`coredis.Redis`
         :raise ConfigurationError: when the redis library is not available
@@ -181,7 +184,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         uri = uri.replace("async+redis", "redis", 1)
         uri = uri.replace("redis+unix", "unix")
 
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
 
         self.dependency = self.dependencies["coredis"].module
 
@@ -194,6 +197,12 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
 
         self.initialize_storage(uri)
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependency.RedisError  # type: ignore[no-any-return]
+
     def initialize_storage(self, _uri: str) -> None:
         # all these methods are coroutines, so must be called with await
         self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW)
@@ -268,8 +277,8 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
 
     async def reset(self) -> Optional[int]:
         """
-        This function calls a Lua Script to delete keys prefixed with
-        in
+        This function calls a Lua Script to delete keys prefixed with
+        ``self.PREFIX`` in blocks of 5000.
 
         .. warning:: This operation was designed to be fast, but was not tested
          on a large production based system. Be careful with its usage as it
@@ -298,7 +307,12 @@ class RedisClusterStorage(RedisStorage):
     }
     "Default options passed to :class:`coredis.RedisCluster`"
 
-    def __init__(
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: url of the form
         ``async+redis+cluster://[:password]@host:port,host:port``
@@ -322,7 +336,9 @@ class RedisClusterStorage(RedisStorage):
             host, port = loc.split(":")
             cluster_hosts.append({"host": host, "port": int(port)})
 
-        super(RedisStorage, self).__init__(
+        super(RedisStorage, self).__init__(
+            uri, wrap_exceptions=wrap_exceptions, **options
+        )
 
         self.dependency = self.dependencies["coredis"].module
 
@@ -336,7 +352,7 @@ class RedisClusterStorage(RedisStorage):
         """
         Redis Clusters are sharded and deleting across shards
         can't be done atomically. Because of this, this reset loops over all
-        keys that are prefixed with
+        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
         one at a time.
 
         .. warning:: This operation was not tested with extremely large data sets.
limits/errors.py
CHANGED
@@ -17,3 +17,12 @@ class ConcurrentUpdateError(Exception):
 
     def __init__(self, key: str, attempts: int) -> None:
         super().__init__(f"Unable to update {key} after {attempts} retries")
+
+
+class StorageError(Exception):
+    """
+    Error raised when an error is encountered in a storage
+    """
+
+    def __init__(self, storage_error: Exception) -> None:
+        self.storage_error = storage_error
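`StorageError` keeps a reference to the underlying backend exception, so callers that opt in to wrapping can still inspect the original failure. A minimal sketch of that hand-off, assuming limits 3.8.0 is installed (the `flaky_backend_call` helper is hypothetical and only exists to force an error):

from limits.errors import StorageError


def flaky_backend_call() -> None:
    # hypothetical helper standing in for a failing storage operation
    raise ConnectionError("backend unreachable")


try:
    try:
        flaky_backend_call()
    except Exception as exc:
        # mirrors what _wrap_errors does when wrap_exceptions=True
        raise StorageError(exc) from exc
except StorageError as wrapped:
    assert isinstance(wrapped.storage_error, ConnectionError)

Because the wrapper raises with ``from exc``, the original traceback also stays attached via ``__cause__`` in addition to being reachable through ``storage_error``.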
limits/limits.py
CHANGED
limits/storage/base.py
CHANGED
@@ -1,11 +1,38 @@
+from __future__ import annotations
+
+import functools
 import threading
 from abc import ABC, abstractmethod
+from typing import Any, cast
 
+from limits import errors
 from limits.storage.registry import StorageRegistry
-from limits.typing import
+from limits.typing import (
+    Callable,
+    List,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import LazyDependency
 
 
+def _wrap_errors(storage: Storage, fn: Callable[P, R]) -> Callable[P, R]:
+    @functools.wraps(fn)
+    def inner(*args: P.args, **kwargs: P.kwargs) -> R:
+        try:
+            return fn(*args, **kwargs)
+        except storage.base_exceptions as exc:
+            if storage.wrap_exceptions:
+                raise errors.StorageError(exc) from exc
+            raise
+
+    return inner
+
+
 class Storage(LazyDependency, metaclass=StorageRegistry):
     """
     Base class to extend when implementing a storage backend.
@@ -14,9 +41,40 @@ class Storage(LazyDependency, metaclass=StorageRegistry):
     STORAGE_SCHEME: Optional[List[str]]
     """The storage schemes to register against this implementation"""
 
-    def
+    def __new__(cls, *args: Any, **kwargs: Any) -> Storage:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "incr",
+            "get",
+            "get_expiry",
+            "check",
+            "reset",
+            "clear",
+        }:
+            setattr(inst, method, _wrap_errors(inst, getattr(inst, method)))
+
+        return inst
+
+    def __init__(
+        self,
+        uri: Optional[str] = None,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ):
+        """
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        """
+
         self.lock = threading.RLock()
         super().__init__()
+        self.wrap_exceptions = wrap_exceptions
+
+    @property
+    @abstractmethod
+    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
+        raise NotImplementedError
 
     @abstractmethod
     def incr(
@@ -77,6 +135,22 @@ class MovingWindowSupport(ABC):
     the moving window strategy
     """
 
+    def __new__(cls, *args: Any, **kwargs: Any) -> MovingWindowSupport:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "acquire_entry",
+            "get_moving_window",
+        }:
+            setattr(
+                inst,
+                method,
+                _wrap_errors(cast(Storage, inst), getattr(inst, method)),
+            )
+
+        return inst
+
+    @abstractmethod
     def acquire_entry(self, key: str, limit: int, expiry: int, amount: int = 1) -> bool:
         """
         :param key: rate limit key to acquire an entry in
@@ -86,6 +160,7 @@ class MovingWindowSupport(ABC):
         """
         raise NotImplementedError
 
+    @abstractmethod
     def get_moving_window(self, key: str, limit: int, expiry: int) -> Tuple[int, int]:
         """
         returns the starting point and the number of entries in the moving
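From a user's perspective the change is opt-in: `wrap_exceptions` defaults to `False`, so existing callers keep seeing each backend's native exceptions. A minimal usage sketch, assuming limits 3.8.0 is installed (`MemoryStorage` declares `ValueError` as its `base_exceptions` per the diff further below):

from limits.storage import MemoryStorage

# Opt in to the new wrapping behaviour; the flag is stored by Storage.__init__.
storage = MemoryStorage(wrap_exceptions=True)
print(storage.wrap_exceptions)  # True
print(storage.base_exceptions)  # <class 'ValueError'>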
limits/storage/etcd.py
CHANGED
@@ -1,6 +1,6 @@
 import time
 import urllib.parse
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Tuple, Type, Union
 
 from limits.errors import ConcurrentUpdateError
 from limits.storage.base import Storage
@@ -44,6 +44,12 @@ class EtcdStorage(Storage):
         )
         self.max_retries = max_retries
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib.Etcd3Exception  # type: ignore[no-any-return]
+
     def prefixed_key(self, key: str) -> bytes:
         return f"{self.PREFIX}/{key}".encode()
 
limits/storage/memcached.py
CHANGED
@@ -7,7 +7,17 @@ from typing import cast
 
 from limits.errors import ConfigurationError
 from limits.storage.base import Storage
-from limits.typing import
+from limits.typing import (
+    Callable,
+    List,
+    MemcachedClientP,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import get_dependency
 
 
@@ -20,16 +30,20 @@ class MemcachedStorage(Storage):
 
     STORAGE_SCHEME = ["memcached"]
     """The storage scheme for memcached"""
+    DEPENDENCIES = ["pymemcache"]
 
     def __init__(
         self,
         uri: str,
+        wrap_exceptions: bool = False,
         **options: Union[str, Callable[[], MemcachedClientP]],
     ) -> None:
         """
         :param uri: memcached location of the form
         ``memcached://host:port,host:port``,
         ``memcached:///var/tmp/path/to/sock``
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`pymemcache.client.base.PooledClient`
         or :class:`pymemcache.client.hash.HashClient` (if there are more than
@@ -50,6 +64,7 @@ class MemcachedStorage(Storage):
         if parsed.path and not parsed.netloc and not parsed.port:
             self.hosts = [parsed.path]  # type: ignore
 
+        self.dependency = self.dependencies["pymemcache"].module
         self.library = str(options.pop("library", "pymemcache.client"))
         self.cluster_library = str(
             options.pop("cluster_library", "pymemcache.client.hash")
@@ -67,6 +82,13 @@ class MemcachedStorage(Storage):
         )  # pragma: no cover
         self.local_storage = threading.local()
         self.local_storage.storage = None
+        super().__init__(uri, wrap_exceptions=wrap_exceptions)
+
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependency.MemcacheError  # type: ignore[no-any-return]
 
     def get_client(
         self, module: ModuleType, hosts: List[Tuple[str, int]], **kwargs: str
@@ -77,11 +99,14 @@ class MemcachedStorage(Storage):
         :param module: the memcached module
         :param hosts: list of memcached hosts
         """
+
         return cast(
             MemcachedClientP,
-
-
-
+            (
+                module.HashClient(hosts, **kwargs)
+                if len(hosts) > 1
+                else module.PooledClient(*hosts, **kwargs)
+            ),
         )
 
     def call_memcached_func(
@@ -89,6 +114,7 @@ class MemcachedStorage(Storage):
     ) -> R:
         if "noreply" in kwargs:
            argspec = inspect.getfullargspec(func)
+
            if not ("noreply" in argspec.args or argspec.varkw):
                kwargs.pop("noreply")
 
@@ -104,6 +130,7 @@ class MemcachedStorage(Storage):
             dependency = get_dependency(
                 self.cluster_library if len(self.hosts) > 1 else self.library
             )[0]
+
             if not dependency:
                 raise ConfigurationError(f"Unable to import {self.cluster_library}")
             self.local_storage.storage = self.client_getter(
limits/storage/memory.py
CHANGED
@@ -4,7 +4,7 @@ from collections import Counter
 
 import limits.typing
 from limits.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, List, Optional, Tuple
+from limits.typing import Dict, List, Optional, Tuple, Type, Union
 
 
 class LockableEntry(threading._RLock):  # type: ignore
@@ -24,13 +24,21 @@ class MemoryStorage(Storage, MovingWindowSupport):
 
     STORAGE_SCHEME = ["memory"]
 
-    def __init__(
+    def __init__(
+        self, uri: Optional[str] = None, wrap_exceptions: bool = False, **_: str
+    ):
         self.storage: limits.typing.Counter[str] = Counter()
         self.expirations: Dict[str, float] = {}
         self.events: Dict[str, List[LockableEntry]] = {}
         self.timer = threading.Timer(0.01, self.__expire_events)
         self.timer.start()
-        super().__init__(uri, **_)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **_)
+
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return ValueError
 
     def __expire_events(self) -> None:
         for key in list(self.events.keys()):
limits/storage/mongodb.py
CHANGED
@@ -7,8 +7,9 @@ from typing import TYPE_CHECKING, Any
 
 from deprecated.sphinx import versionadded
 
-from limits.typing import Dict, Optional, Tuple, Union
+from limits.typing import Dict, Optional, Tuple, Type, Union
 
+from ..util import get_dependency
 from .base import MovingWindowSupport, Storage
 
 if TYPE_CHECKING:
@@ -33,22 +34,29 @@ class MongoDBStorage(Storage, MovingWindowSupport):
     DEPENDENCIES = ["pymongo"]
 
     def __init__(
-        self,
+        self,
+        uri: str,
+        database_name: str = "limits",
+        wrap_exceptions: bool = False,
+        **options: Union[int, str, bool],
     ) -> None:
         """
         :param uri: uri of the form ``mongodb://[user:password]@host:port?...``,
         This uri is passed directly to :class:`~pymongo.mongo_client.MongoClient`
         :param database_name: The database to use for storing the rate limit
         collections.
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are merged with
         :data:`DEFAULT_OPTIONS` and passed to the constructor of
         :class:`~pymongo.mongo_client.MongoClient`
         :raise ConfigurationError: when the :pypi:`pymongo` library is not available
         """
 
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
 
         self.lib = self.dependencies["pymongo"].module
+        self.lib_errors, _ = get_dependency("pymongo.errors")
 
         mongo_opts = options.copy()
         [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()]
@@ -60,6 +68,12 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.windows = self.storage.get_database(database_name).windows
         self.__initialize_database()
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib_errors.PyMongoError  # type: ignore
+
     def __initialize_database(self) -> None:
         self.counters.create_index("expireAt", expireAfterSeconds=0)
         self.windows.create_index("expireAt", expireAfterSeconds=0)
@@ -222,8 +236,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.windows.update_one(
             {
                 "_id": key,
-                "entries.%d"
-
+                "entries.%d" % (limit - amount): {
+                    "$not": {"$gte": timestamp - expiry}
+                },
             },
             updates,
             upsert=True,
limits/storage/redis.py
CHANGED
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
 
 from packaging.version import Version
 
-from limits.typing import Optional, RedisClient, ScriptP, Tuple, Union
+from limits.typing import Optional, RedisClient, ScriptP, Tuple, Type, Union
 
 from ..util import get_package_data
 from .base import MovingWindowSupport, Storage
@@ -145,6 +145,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         self,
         uri: str,
         connection_pool: Optional[redis.connection.ConnectionPool] = None,
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """
@@ -155,21 +156,31 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         case of ``redis+unix://`` where it is replaced with ``unix://``.
         :param connection_pool: if provided, the redis client is initialized with
         the connection pool and any other params passed as :paramref:`options`
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`redis.Redis`
         :raise ConfigurationError: when the :pypi:`redis` library is not available
         """
-        super().__init__(uri, **options)
-
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
+        self.dependency = self.dependencies["redis"].module
 
         uri = uri.replace("redis+unix", "unix")
 
         if not connection_pool:
-            self.storage =
+            self.storage = self.dependency.from_url(uri, **options)
         else:
-            self.storage =
+            self.storage = self.dependency.Redis(
+                connection_pool=connection_pool, **options
+            )
         self.initialize_storage(uri)
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependency.RedisError  # type: ignore[no-any-return]
+
     def initialize_storage(self, _uri: str) -> None:
         self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW)
         self.lua_acquire_window = self.storage.register_script(
@@ -237,8 +248,8 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
 
     def reset(self) -> Optional[int]:
         """
-        This function calls a Lua Script to delete keys prefixed with
-        in
+        This function calls a Lua Script to delete keys prefixed with
+        ``self.PREFIX`` in blocks of 5000.
 
         .. warning::
             This operation was designed to be fast, but was not tested
limits/storage/redis_cluster.py
CHANGED
@@ -116,7 +116,7 @@ class RedisClusterStorage(RedisStorage):
         """
         Redis Clusters are sharded and deleting across shards
         can't be done atomically. Because of this, this reset loops over all
-        keys that are prefixed with
+        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
         one at a time.
 
         .. warning::
limits/storage/redis_sentinel.py
CHANGED
@@ -29,7 +29,8 @@ class RedisSentinelStorage(RedisStorage):
         service_name: Optional[str] = None,
         use_replicas: bool = True,
         sentinel_kwargs: Optional[Dict[str, Union[float, str, bool]]] = None,
-
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
     ) -> None:
         """
         :param uri: url of the form
@@ -39,13 +40,17 @@ class RedisSentinelStorage(RedisStorage):
         :param use_replicas: Whether to use replicas for read only operations
         :param sentinel_kwargs: kwargs to pass as
         :attr:`sentinel_kwargs` to :class:`redis.sentinel.Sentinel`
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
         directly to the constructor of :class:`redis.sentinel.Sentinel`
         :raise ConfigurationError: when the redis library is not available
         or if the redis master host cannot be pinged.
         """
 
-        super(RedisStorage, self).__init__(
+        super(RedisStorage, self).__init__(
+            uri, wrap_exceptions=wrap_exceptions, **options
+        )
 
         parsed = urllib.parse.urlparse(uri)
         sentinel_configuration = []
@@ -74,7 +79,7 @@ class RedisSentinelStorage(RedisStorage):
         self.sentinel: "redis.sentinel.Sentinel" = sentinel_dep.Sentinel(
             sentinel_configuration,
             sentinel_kwargs={**parsed_auth, **sentinel_options},
-            **{**parsed_auth, **options}
+            **{**parsed_auth, **options},
         )
         self.storage = self.sentinel.master_for(self.service_name)
         self.storage_slave = self.sentinel.slave_for(self.service_name)
limits/typing.py
CHANGED
limits/util.py
CHANGED
{limits-3.7.0.dist-info → limits-3.8.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: limits
-Version: 3.
+Version: 3.8.0
 Summary: Rate limiting utilities
 Home-page: https://limits.readthedocs.org
 Author: Ali-Akber Saifee
@@ -76,12 +76,6 @@ limits
 
 **limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached, MongoDB & Etcd).
 
-----
-
-Sponsored by Zuplo - fully-managed, programmable API Management platform.
-Add rate limiting and more to your public API in minutes, try it at `zuplo.com <https://zuplo.link/3DZM9Ej>`_
-
-----
 
 Supported Strategies
 ====================
limits-3.8.0.dist-info/RECORD
ADDED
@@ -0,0 +1,37 @@
+limits/__init__.py,sha256=j_yVhgN9pdz8o5rQjVwdJTBSq8F-CTzof9kkiYgjRbw,728
+limits/_version.py,sha256=VSokpHoRkI7WgCt1pxQTeRNyLsvBlTcgUT3Hkqv48Aw,497
+limits/errors.py,sha256=xCKGOVJiD-g8FlsQQb17AW2pTUvalYSuizPpvEVoYJE,626
+limits/limits.py,sha256=bkPQ_gtkggwqYO6QiLLiaSBKUOJTjDzzELIVQiOWYt4,4944
+limits/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+limits/strategies.py,sha256=7pr2V34KdOEfxnYOf882Cl2qKY-KK6HwKjdYo_IsD4c,6690
+limits/typing.py,sha256=RNfTbaRV5LGHdwM2KPcqH6gwep_SEMka9dPpOcw1wjs,3008
+limits/util.py,sha256=9bSmdtiQnai4FHJ8yN4s6dpT0n5BPiDr6tXnqGhYmIw,5744
+limits/version.py,sha256=YwkF3dtq1KGzvmL3iVGctA8NNtGlK_0arrzZkZGVjUs,47
+limits/aio/__init__.py,sha256=IOetunwQy1c5GefzitK8lewbTzHGiE-kmE9NlqSdr3U,82
+limits/aio/strategies.py,sha256=REaQ-lqgqkN5wrFZ26AZ3sCHO8oZBL_mWhI6nMRaBz8,6485
+limits/aio/storage/__init__.py,sha256=CbtuSlVl1jPyN_vsEI_ApWblDblVaL46xcZ2M_oM0V8,595
+limits/aio/storage/base.py,sha256=dSM39OguwdLdBIHaz98eWljHzxUfgcatweNSeo9d8OQ,4795
+limits/aio/storage/etcd.py,sha256=Rjb_EYKFRr4F2Z6zvAPP9vQOyXJQHaju3VjxxUs75_c,4791
+limits/aio/storage/memcached.py,sha256=6aTlACfCtchdcZqoisnei0MOlCH7yLV9A1yCjOE5f9g,4802
+limits/aio/storage/memory.py,sha256=DT8GzExZsN0hKitS2HP-eiJfHYYJ9W_IRwGOj56uChY,5850
+limits/aio/storage/mongodb.py,sha256=RrNtUdG0_yVcb3TV7BdG5idX8vM_ckvC4Sc7zrJvI0Y,9574
+limits/aio/storage/redis.py,sha256=aY_WtwB6x_2p_5NkO8d7I5Mi2lZlHSHC70u1qB0Tuus,15640
+limits/resources/redis/lua_scripts/acquire_moving_window.lua,sha256=5CFJX7D6T6RG5SFr6eVZ6zepmI1EkGWmKeVEO4QNrWo,483
+limits/resources/redis/lua_scripts/clear_keys.lua,sha256=zU0cVfLGmapRQF9x9u0GclapM_IB2pJLszNzVQ1QRK4,184
+limits/resources/redis/lua_scripts/incr_expire.lua,sha256=Uq9NcrrcDI-F87TDAJexoSJn2SDgeXIUEYozCp9S3oA,195
+limits/resources/redis/lua_scripts/moving_window.lua,sha256=ir0SkuRVnrqkVSFNIuedTV_KW6zG70Z56u6-_FpR_20,352
+limits/storage/__init__.py,sha256=8i1-SoTEV_XGAMYDepcLra7do-Tx4rUPbPrUQVVJgTw,2518
+limits/storage/base.py,sha256=fDdYLa-RrnjhBTO1hE5aTTM8q8n3M5HD-65KyWWXBtg,4627
+limits/storage/etcd.py,sha256=wkC_mj4Tsf2nwUKByMiHiGzA40N3mDepEwdLmvH8wmw,4484
+limits/storage/memcached.py,sha256=bMzfZgYa_EWcZAjSZLcygpk3hpeOAErBpRE8dVwyXQs,6640
+limits/storage/memory.py,sha256=sFBS8nYSxKFBCSXzhbK1YlX9BKc4whpjlyLbgrCnPsw,5555
+limits/storage/mongodb.py,sha256=aK7PABV_3enpzq1wmlX0N2YOxLTpj-H7-3XHrs0vb84,8462
+limits/storage/redis.py,sha256=3zJ1gDMDepT_pGN9d2aAN7Pea7tMBI49VK60IHv-Ooc,8452
+limits/storage/redis_cluster.py,sha256=KwhWV0v3_TliRyS3OU15IlpeC8gRQr29U4FkcME01fo,5380
+limits/storage/redis_sentinel.py,sha256=7PVB0hBl0I_enhN_h9QSJTE7zGuYtjkebotTqxm2iZo,3875
+limits/storage/registry.py,sha256=xcBcxuu6srqmoS4WqDpkCXnRLB19ctH98v21P8S9kS8,708
+limits-3.8.0.dist-info/LICENSE.txt,sha256=T6i7kq7F5gIPfcno9FCxU5Hcwm22Bjq0uHZV3ElcjsQ,1061
+limits-3.8.0.dist-info/METADATA,sha256=TZ-3C78WxvC03_NNRgit9qvd9NxVActX3k5ByYWMsXo,7118
+limits-3.8.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+limits-3.8.0.dist-info/top_level.txt,sha256=C7g5ahldPoU2s6iWTaJayUrbGmPK1d6e9t5Nn0vQ2jM,7
+limits-3.8.0.dist-info/RECORD,,
limits-3.7.0.dist-info/RECORD
DELETED
@@ -1,37 +0,0 @@
-limits/__init__.py,sha256=j_yVhgN9pdz8o5rQjVwdJTBSq8F-CTzof9kkiYgjRbw,728
-limits/_version.py,sha256=-BJDgGzH-CbJma8-Y_Y0s314aB1Y3YzjezQxt_8tfDw,497
-limits/errors.py,sha256=sUolBUfTFLQSzo6dfE2E9j_0K7_8Nr9_Hx-v5C4D0EU,416
-limits/limits.py,sha256=lwQnA5wegkW_AXtplOH3tLuQ1LByMX9hqHJquYVYdTs,4943
-limits/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-limits/strategies.py,sha256=7pr2V34KdOEfxnYOf882Cl2qKY-KK6HwKjdYo_IsD4c,6690
-limits/typing.py,sha256=h6K8fbmgCyJih2qJFq52_6Ic1WmFNDQx1UbdEGHCD5o,2976
-limits/util.py,sha256=LRsn6i3KmQE33Ea4FTf4IxD9SMtwiKVXN5iDxTEuOuc,5743
-limits/version.py,sha256=YwkF3dtq1KGzvmL3iVGctA8NNtGlK_0arrzZkZGVjUs,47
-limits/aio/__init__.py,sha256=IOetunwQy1c5GefzitK8lewbTzHGiE-kmE9NlqSdr3U,82
-limits/aio/strategies.py,sha256=REaQ-lqgqkN5wrFZ26AZ3sCHO8oZBL_mWhI6nMRaBz8,6485
-limits/aio/storage/__init__.py,sha256=CbtuSlVl1jPyN_vsEI_ApWblDblVaL46xcZ2M_oM0V8,595
-limits/aio/storage/base.py,sha256=gsy_91lvm53AoAKi1plaegcEOSZXK3OiMP0mxA6lPAw,2941
-limits/aio/storage/etcd.py,sha256=iGGuKHN5cCW9r4nLeiZ4TnoU1pfSXXLZhS2YOWznryw,4568
-limits/aio/storage/memcached.py,sha256=aR7f5NY-mXtFFjphbaxoL7z1cAHZuRYjpGUVOktoI4A,4296
-limits/aio/storage/memory.py,sha256=A506i0vncBmsaL9GGAqepQkPAXyc-HxmJ8gS8kJtqpE,5597
-limits/aio/storage/mongodb.py,sha256=xnZTyogdLdYir5xQMs_yQTBJt6B5P1pJx0VYBAo_Ndc,9041
-limits/aio/storage/redis.py,sha256=k0YgeXlvITUZEjql32KNm1vhN73zZHCcx0QJyK_Gg94,15080
-limits/resources/redis/lua_scripts/acquire_moving_window.lua,sha256=5CFJX7D6T6RG5SFr6eVZ6zepmI1EkGWmKeVEO4QNrWo,483
-limits/resources/redis/lua_scripts/clear_keys.lua,sha256=zU0cVfLGmapRQF9x9u0GclapM_IB2pJLszNzVQ1QRK4,184
-limits/resources/redis/lua_scripts/incr_expire.lua,sha256=Uq9NcrrcDI-F87TDAJexoSJn2SDgeXIUEYozCp9S3oA,195
-limits/resources/redis/lua_scripts/moving_window.lua,sha256=ir0SkuRVnrqkVSFNIuedTV_KW6zG70Z56u6-_FpR_20,352
-limits/storage/__init__.py,sha256=8i1-SoTEV_XGAMYDepcLra7do-Tx4rUPbPrUQVVJgTw,2518
-limits/storage/base.py,sha256=-JV-zAkss7pOETZyPYjo8ZZqTMGs-DPqgz_gcfArXfs,2818
-limits/storage/etcd.py,sha256=SlDHRItliRaR2j5Rf_v_oQ57ITpmNMYQJSamM3SbwCA,4258
-limits/storage/memcached.py,sha256=qMvRIEtRRzZXSgcZSUTBgUlBDeNOpIr_gDTV2r5SPak,6005
-limits/storage/memory.py,sha256=plPsyLB26QN9I6jQq00fra-cdh8xoDAgaoM-kWuECtk,5302
-limits/storage/mongodb.py,sha256=Z4_Og2Ys6uwQT_D4dUDdDAEndBY7UfCh_LiHgZf8IUk,7909
-limits/storage/redis.py,sha256=-5_kQuvC6tjMUgDoYAsANqMouu0PujE-jocRqmxXvsc,7970
-limits/storage/redis_cluster.py,sha256=fINAmdZSs_0lAr3p_TdZtpWAUqmiWmKFNKyfRcMkSTQ,5378
-limits/storage/redis_sentinel.py,sha256=AxbtYZQXCNL-4yu7dQ8i6MhwO2PNLMRLHe3JcHkMKvA,3647
-limits/storage/registry.py,sha256=xcBcxuu6srqmoS4WqDpkCXnRLB19ctH98v21P8S9kS8,708
-limits-3.7.0.dist-info/LICENSE.txt,sha256=T6i7kq7F5gIPfcno9FCxU5Hcwm22Bjq0uHZV3ElcjsQ,1061
-limits-3.7.0.dist-info/METADATA,sha256=EUabf_Y_kURN4qtkyeguZu_rlNI38lQUB1PgKPEvv1s,7314
-limits-3.7.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-limits-3.7.0.dist-info/top_level.txt,sha256=C7g5ahldPoU2s6iWTaJayUrbGmPK1d6e9t5Nn0vQ2jM,7
-limits-3.7.0.dist-info/RECORD,,
{limits-3.7.0.dist-info → limits-3.8.0.dist-info}/LICENSE.txt
File without changes
{limits-3.7.0.dist-info → limits-3.8.0.dist-info}/top_level.txt
File without changes