limits 3.6.0.tar.gz → 3.9.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {limits-3.6.0 → limits-3.9.0}/HISTORY.rst +34 -0
- {limits-3.6.0 → limits-3.9.0}/PKG-INFO +26 -8
- {limits-3.6.0 → limits-3.9.0}/README.rst +0 -6
- {limits-3.6.0 → limits-3.9.0}/doc/source/api.rst +2 -0
- {limits-3.6.0 → limits-3.9.0}/limits/_version.py +3 -3
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/base.py +78 -2
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/etcd.py +7 -1
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/memcached.py +19 -3
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/memory.py +11 -3
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/mongodb.py +21 -18
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/redis.py +41 -11
- {limits-3.6.0 → limits-3.9.0}/limits/errors.py +9 -0
- {limits-3.6.0 → limits-3.9.0}/limits/limits.py +1 -0
- {limits-3.6.0 → limits-3.9.0}/limits/storage/base.py +77 -2
- {limits-3.6.0 → limits-3.9.0}/limits/storage/etcd.py +7 -1
- {limits-3.6.0 → limits-3.9.0}/limits/storage/memcached.py +31 -4
- {limits-3.6.0 → limits-3.9.0}/limits/storage/memory.py +11 -3
- {limits-3.6.0 → limits-3.9.0}/limits/storage/mongodb.py +23 -17
- {limits-3.6.0 → limits-3.9.0}/limits/storage/redis.py +32 -8
- {limits-3.6.0 → limits-3.9.0}/limits/storage/redis_cluster.py +5 -4
- {limits-3.6.0 → limits-3.9.0}/limits/storage/redis_sentinel.py +8 -3
- {limits-3.6.0 → limits-3.9.0}/limits/typing.py +3 -1
- {limits-3.6.0 → limits-3.9.0}/limits/util.py +1 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/PKG-INFO +26 -8
- limits-3.9.0/pyproject.toml +10 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/dev.txt +0 -2
- {limits-3.6.0 → limits-3.9.0}/requirements/docs.txt +2 -2
- {limits-3.6.0 → limits-3.9.0}/requirements/test.txt +2 -2
- {limits-3.6.0 → limits-3.9.0}/setup.py +4 -1
- {limits-3.6.0 → limits-3.9.0}/tests/test_storage.py +106 -4
- {limits-3.6.0 → limits-3.9.0}/versioneer.py +161 -112
- limits-3.6.0/pyproject.toml +0 -3
- {limits-3.6.0 → limits-3.9.0}/CLASSIFIERS +0 -0
- {limits-3.6.0 → limits-3.9.0}/CONTRIBUTIONS.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/LICENSE.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/MANIFEST.in +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/Makefile +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/_static/custom.css +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/async.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/changelog.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/conf.py +2 -2
- {limits-3.6.0 → limits-3.9.0}/doc/source/custom-storage.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/index.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/installation.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/quickstart.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/storage.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/strategies.rst +0 -0
- {limits-3.6.0 → limits-3.9.0}/doc/source/theme_config.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/__init__.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/aio/__init__.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/aio/storage/__init__.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/aio/strategies.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/py.typed +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/resources/redis/lua_scripts/acquire_moving_window.lua +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/resources/redis/lua_scripts/clear_keys.lua +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/resources/redis/lua_scripts/incr_expire.lua +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/resources/redis/lua_scripts/moving_window.lua +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/storage/__init__.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/storage/registry.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/strategies.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits/version.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/SOURCES.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/dependency_links.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/not-zip-safe +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/requires.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/limits.egg-info/top_level.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/ci.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/main.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/async-etcd.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/async-memcached.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/async-mongodb.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/async-redis.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/etcd.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/memcached.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/mongodb.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/redis.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/requirements/storage/rediscluster.txt +0 -0
- {limits-3.6.0 → limits-3.9.0}/setup.cfg +0 -0
- {limits-3.6.0 → limits-3.9.0}/tests/test_limit_granularities.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/tests/test_limits.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/tests/test_ratelimit_parser.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/tests/test_strategy.py +0 -0
- {limits-3.6.0 → limits-3.9.0}/tests/test_utils.py +0 -0
{limits-3.6.0 → limits-3.9.0}/HISTORY.rst

@@ -3,6 +3,37 @@
 Changelog
 =========

+v3.9.0
+------
+Release Date: 2024-02-17
+
+* Bug Fix
+
+  * Remove excessively low defaults for mongodb storage and instead
+    delegate to the underlying dependency (pymongo, motor)
+
+
+v3.8.0
+------
+Release Date: 2024-02-14
+
+* Features
+
+  * Add option to wrap storage errors with a ``StorageError``
+    exception
+
+
+v3.7.0
+------
+Release Date: 2023-11-24
+
+* Features
+
+  * Ensure rate limit keys in redis use are prefixed
+    with a `LIMITS` prefix. This allows for resetting
+    all keys generated by the library without implicit
+    knowledge of the key structure.
+
 v3.6.0
 ------
 Release Date: 2023-08-31

@@ -624,6 +655,9 @@ Release Date: 2015-01-08



+
+
+


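The three changelog entries above drive most of the code changes in this diff: the MongoDB default-timeout removal (v3.9.0), the ``StorageError`` wrapping option (v3.8.0), and the ``LIMITS`` key prefix (v3.7.0). Below is a minimal sketch of the v3.8.0 option from a caller's perspective, assuming the pre-existing ``storage_from_string`` factory and fixed-window strategy from the limits API (neither appears in this diff):

```python
# Hedged sketch: wrap_exceptions / StorageError come from the 3.8.0 changes shown
# in the limits/errors.py and storage base-class hunks below; the factory and
# strategy imports are existing limits APIs assumed here for illustration.
from limits import parse
from limits.errors import StorageError
from limits.storage import storage_from_string
from limits.strategies import FixedWindowRateLimiter

# wrap_exceptions=True asks the storage to re-raise backend exceptions
# (e.g. a redis connection error) as limits.errors.StorageError.
storage = storage_from_string("redis://localhost:6379", wrap_exceptions=True)
limiter = FixedWindowRateLimiter(storage)

try:
    allowed = limiter.hit(parse("10/minute"), "user:42")
except StorageError as exc:
    # exc.storage_error holds the original backend exception.
    allowed = True  # e.g. fail open while the backend is unreachable
```

With the default ``wrap_exceptions=False``, behaviour is unchanged and the backend exception propagates as before.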
{limits-3.6.0 → limits-3.9.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: limits
-Version: 3.6.0
+Version: 3.9.0
 Summary: Rate limiting utilities
 Home-page: https://limits.readthedocs.org
 Author: Ali-Akber Saifee

@@ -20,17 +20,41 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: >=3.7
+License-File: LICENSE.txt
+Requires-Dist: deprecated>=1.2
+Requires-Dist: importlib_resources>=1.3
+Requires-Dist: packaging<24,>=21
+Requires-Dist: typing_extensions
 Provides-Extra: redis
+Requires-Dist: redis!=4.5.2,!=4.5.3,<6.0.0,>3; extra == "redis"
 Provides-Extra: rediscluster
+Requires-Dist: redis!=4.5.2,!=4.5.3,>=4.2.0; extra == "rediscluster"
 Provides-Extra: memcached
+Requires-Dist: pymemcache<5.0.0,>3; extra == "memcached"
 Provides-Extra: mongodb
+Requires-Dist: pymongo<5,>4.1; extra == "mongodb"
 Provides-Extra: etcd
+Requires-Dist: etcd3; extra == "etcd"
 Provides-Extra: async-redis
+Requires-Dist: coredis<5,>=3.4.0; extra == "async-redis"
 Provides-Extra: async-memcached
+Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "async-memcached"
+Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "async-memcached"
 Provides-Extra: async-mongodb
+Requires-Dist: motor<4,>=3; extra == "async-mongodb"
 Provides-Extra: async-etcd
+Requires-Dist: aetcd; extra == "async-etcd"
 Provides-Extra: all
-
+Requires-Dist: redis!=4.5.2,!=4.5.3,<6.0.0,>3; extra == "all"
+Requires-Dist: redis!=4.5.2,!=4.5.3,>=4.2.0; extra == "all"
+Requires-Dist: pymemcache<5.0.0,>3; extra == "all"
+Requires-Dist: pymongo<5,>4.1; extra == "all"
+Requires-Dist: etcd3; extra == "all"
+Requires-Dist: coredis<5,>=3.4.0; extra == "all"
+Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "all"
+Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "all"
+Requires-Dist: motor<4,>=3; extra == "all"
+Requires-Dist: aetcd; extra == "all"

 .. |ci| image:: https://github.com/alisaifee/limits/workflows/CI/badge.svg?branch=master
    :target: https://github.com/alisaifee/limits/actions?query=branch%3Amaster+workflow%3ACI

@@ -52,12 +76,6 @@ limits

 **limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached, MongoDB & Etcd).

-----
-
-Sponsored by Zuplo - fully-managed, programmable API Management platform.
-Add rate limiting and more to your public API in minutes, try it at `zuplo.com <https://zuplo.link/3DZM9Ej>`_
-
-----

 Supported Strategies
 ====================
{limits-3.6.0 → limits-3.9.0}/README.rst

@@ -18,12 +18,6 @@ limits

 **limits** is a python library to perform rate limiting with commonly used storage backends (Redis, Memcached, MongoDB & Etcd).

-----
-
-Sponsored by Zuplo - fully-managed, programmable API Management platform.
-Add rate limiting and more to your public API in minutes, try it at `zuplo.com <https://zuplo.link/3DZM9Ej>`_
-
-----

 Supported Strategies
 ====================
{limits-3.6.0 → limits-3.9.0}/limits/_version.py

@@ -8,11 +8,11 @@ import json

 version_json = '''
 {
- "date": "
+ "date": "2024-02-17T17:36:20-0800",
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "3.6.0"
+ "full-revisionid": "7acdd2228f4f414bc411c698875dc24fea2d303b",
+ "version": "3.9.0"
 }
 ''' # END VERSION_JSON

{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/base.py

@@ -1,12 +1,43 @@
+from __future__ import annotations
+
+import functools
 from abc import ABC, abstractmethod
+from typing import Any, cast

 from deprecated.sphinx import versionadded

+from limits import errors
 from limits.storage.registry import StorageRegistry
-from limits.typing import
+from limits.typing import (
+    Awaitable,
+    Callable,
+    List,
+    Optional,
+    P,
+    R,
+    Tuple,
+    Type,
+    Union,
+)
 from limits.util import LazyDependency


+def _wrap_errors(
+    storage: Storage,
+    fn: Callable[P, Awaitable[R]],
+) -> Callable[P, Awaitable[R]]:
+    @functools.wraps(fn)
+    async def inner(*args: P.args, **kwargs: P.kwargs) -> R:
+        try:
+            return await fn(*args, **kwargs)
+        except storage.base_exceptions as exc:
+            if storage.wrap_exceptions:
+                raise errors.StorageError(exc) from exc
+            raise
+
+    return inner
+
+
 @versionadded(version="2.1")
 class Storage(LazyDependency, metaclass=StorageRegistry):
     """

@@ -16,10 +47,38 @@ class Storage(LazyDependency, metaclass=StorageRegistry):
     STORAGE_SCHEME: Optional[List[str]]
     """The storage schemes to register against this implementation"""

+    def __new__(cls, *args: Any, **kwargs: Any) -> Storage:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "incr",
+            "get",
+            "get_expiry",
+            "check",
+            "reset",
+            "clear",
+        }:
+            setattr(inst, method, _wrap_errors(inst, getattr(inst, method)))
+
+        return inst
+
     def __init__(
-        self,
+        self,
+        uri: Optional[str] = None,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
     ) -> None:
+        """
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        """
         super().__init__()
+        self.wrap_exceptions = wrap_exceptions
+
+    @property
+    @abstractmethod
+    def base_exceptions(self) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:
+        raise NotImplementedError

     @abstractmethod
     async def incr(

@@ -80,6 +139,22 @@ class MovingWindowSupport(ABC):
     the moving window strategy
     """

+    def __new__(cls, *args: Any, **kwargs: Any) -> MovingWindowSupport:  # type: ignore[misc]
+        inst = super().__new__(cls)
+
+        for method in {
+            "acquire_entry",
+            "get_moving_window",
+        }:
+            setattr(
+                inst,
+                method,
+                _wrap_errors(cast(Storage, inst), getattr(inst, method)),
+            )
+
+        return inst
+
+    @abstractmethod
     async def acquire_entry(
         self, key: str, limit: int, expiry: int, amount: int = 1
     ) -> bool:

@@ -91,6 +166,7 @@ class MovingWindowSupport(ABC):
         """
         raise NotImplementedError

+    @abstractmethod
     async def get_moving_window(
         self, key: str, limit: int, expiry: int
     ) -> Tuple[int, int]:
{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/etcd.py

@@ -1,7 +1,7 @@
 import asyncio
 import time
 import urllib.parse
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Tuple, Type, Union

 from limits.aio.storage.base import Storage
 from limits.errors import ConcurrentUpdateError

@@ -46,6 +46,12 @@ class EtcdStorage(Storage):
         )
         self.max_retries = max_retries

+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib.ClientError  # type: ignore[no-any-return]
+
     def prefixed_key(self, key: str) -> bytes:
         return f"{self.PREFIX}/{key}".encode()

{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/memcached.py

@@ -4,7 +4,7 @@ import urllib.parse
 from deprecated.sphinx import versionadded

 from limits.aio.storage.base import Storage
-from limits.typing import EmcacheClientP, Optional, Union
+from limits.typing import EmcacheClientP, Optional, Tuple, Type, Union


 @versionadded(version="2.1")

@@ -20,10 +20,17 @@ class MemcachedStorage(Storage):

     DEPENDENCIES = ["emcache"]

-    def __init__(
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: memcached location of the form
          ``async+memcached://host:port,host:port``
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
          directly to the constructor of :class:`emcache.Client`
         :raise ConfigurationError: when :pypi:`emcache` is not available

@@ -38,9 +45,18 @@ class MemcachedStorage(Storage):

         self._options = options
         self._storage = None
-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
         self.dependency = self.dependencies["emcache"].module

+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return (
+            self.dependency.ClusterNoAvailableNodes,
+            self.dependency.CommandError,
+        )
+
     async def get_storage(self) -> EmcacheClientP:
         if not self._storage:
             self._storage = await self.dependency.create_client(
{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/memory.py

@@ -6,7 +6,7 @@ from deprecated.sphinx import versionadded

 import limits.typing
 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, List, Optional, Tuple
+from limits.typing import Dict, List, Optional, Tuple, Type, Union


 class LockableEntry(asyncio.Lock):

@@ -30,12 +30,20 @@ class MemoryStorage(Storage, MovingWindowSupport):
     async context
     """

-    def __init__(
+    def __init__(
+        self, uri: Optional[str] = None, wrap_exceptions: bool = False, **_: str
+    ) -> None:
         self.storage: limits.typing.Counter[str] = Counter()
         self.expirations: Dict[str, float] = {}
         self.events: Dict[str, List[LockableEntry]] = {}
         self.timer: Optional[asyncio.Task[None]] = None
-        super().__init__(uri, **_)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **_)
+
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return ValueError

     async def __expire_events(self) -> None:
         for key in self.events.keys():
{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/mongodb.py

@@ -4,12 +4,13 @@ import asyncio
 import calendar
 import datetime
 import time
-from typing import Any
+from typing import Any, cast

 from deprecated.sphinx import versionadded

 from limits.aio.storage.base import MovingWindowSupport, Storage
-from limits.typing import Dict, Optional, ParamSpec, Tuple, TypeVar, Union
+from limits.typing import Dict, Optional, ParamSpec, Tuple, Type, TypeVar, Union
+from limits.util import get_dependency

 P = ParamSpec("P")
 R = TypeVar("R")

@@ -28,18 +29,13 @@ class MongoDBStorage(Storage, MovingWindowSupport):
     The storage scheme for MongoDB for use in an async context
     """

-    DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = {
-        "serverSelectionTimeoutMS": 1000,
-        "connectTimeoutMS": 1000,
-    }
-    "Default options passed to :class:`~motor.motor_asyncio.AsyncIOMotorClient`"
-
     DEPENDENCIES = ["motor.motor_asyncio", "pymongo"]

     def __init__(
         self,
         uri: str,
         database_name: str = "limits",
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """

@@ -47,23 +43,23 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         This uri is passed directly to :class:`~motor.motor_asyncio.AsyncIOMotorClient`
         :param database_name: The database to use for storing the rate limit
          collections.
-        :param
-        :
-
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        :param options: all remaining keyword arguments are passed
+         to the constructor of :class:`~motor.motor_asyncio.AsyncIOMotorClient`
         :raise ConfigurationError: when the :pypi:`motor` or :pypi:`pymongo` are
          not available
         """

-        mongo_opts = options.copy()
-        [mongo_opts.setdefault(k, v) for k, v in self.DEFAULT_OPTIONS.items()]
         uri = uri.replace("async+mongodb", "mongodb", 1)

-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)

         self.dependency = self.dependencies["motor.motor_asyncio"]
         self.proxy_dependency = self.dependencies["pymongo"]
+        self.lib_errors, _ = get_dependency("pymongo.errors")

-        self.storage = self.dependency.module.AsyncIOMotorClient(uri, **
+        self.storage = self.dependency.module.AsyncIOMotorClient(uri, **options)
         # TODO: Fix this hack. It was noticed when running a benchmark
         # with FastAPI - however - doesn't appear in unit tests or in an isolated
         # use. Reference: https://jira.mongodb.org/browse/MOTOR-822
@@ -72,6 +68,12 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.__database_name = database_name
         self.__indices_created = False

+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.lib_errors.PyMongoError  # type: ignore
+
     @property
     def database(self):  # type: ignore
         return self.storage.get_database(self.__database_name)

@@ -98,7 +100,7 @@ class MongoDBStorage(Storage, MovingWindowSupport):
             self.database.counters.drop(), self.database.windows.drop()
         )

-        return num_keys
+        return cast(int, num_keys)

     async def clear(self, key: str) -> None:
         """

@@ -257,8 +259,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         await self.database.windows.update_one(
             {
                 "_id": key,
-                "entries.%d"
-
+                "entries.%d" % (limit - amount): {
+                    "$not": {"$gte": timestamp - expiry}
+                },
             },
             updates,
             upsert=True,
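The `DEFAULT_OPTIONS` removal in the mongodb hunks above is the v3.9.0 bug fix from the changelog: the storage no longer forces `serverSelectionTimeoutMS=1000` and `connectTimeoutMS=1000`, and whatever arrives in `**options` is forwarded verbatim to the motor/pymongo client. A hedged sketch of restoring the old, aggressive timeouts explicitly, for callers that relied on them:

```python
# Hedged sketch: as of 3.9.0 the MongoDB storages delegate timeout defaults to
# pymongo/motor, so the 1000ms values previously injected by DEFAULT_OPTIONS must
# now be passed explicitly if that behaviour is still wanted. Host/port are
# placeholders.
from limits.aio.storage import MongoDBStorage

storage = MongoDBStorage(
    "async+mongodb://localhost:27017",
    serverSelectionTimeoutMS=1000,  # previously forced by DEFAULT_OPTIONS
    connectTimeoutMS=1000,          # previously forced by DEFAULT_OPTIONS
)
```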
{limits-3.6.0 → limits-3.9.0}/limits/aio/storage/redis.py

@@ -7,7 +7,7 @@ from packaging.version import Version

 from limits.aio.storage.base import MovingWindowSupport, Storage
 from limits.errors import ConfigurationError
-from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Union
+from limits.typing import AsyncRedisClient, Dict, Optional, Tuple, Type, Union
 from limits.util import get_package_data

 if TYPE_CHECKING:

@@ -30,6 +30,11 @@ class RedisInteractor:
     lua_clear_keys: "coredis.commands.Script[bytes]"
     lua_incr_expire: "coredis.commands.Script[bytes]"

+    PREFIX = "LIMITS"
+
+    def prefixed_key(self, key: str) -> str:
+        return f"{self.PREFIX}:{key}"
+
     async def _incr(
         self,
         key: str,
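The `PREFIX`/`prefixed_key` pair above implements the v3.7.0 changelog entry: every key the library writes to redis is namespaced under ``LIMITS``, which is what lets `reset()` later in this file delete ``LIMITS:*`` without knowing the key structure. A small, purely illustrative restatement of the effect:

```python
# Hedged, illustrative restatement of prefixed_key from the hunk above;
# RedisInteractor itself is internal to the storage implementations.
PREFIX = "LIMITS"

def prefixed_key(key: str) -> str:
    return f"{PREFIX}:{key}"

assert prefixed_key("my-counter") == "LIMITS:my-counter"
# reset() builds its deletion pattern the same way:
assert prefixed_key("*") == "LIMITS:*"
```

One practical consequence of the new namespace: counters written by 3.6.0 live under un-prefixed keys, so after an in-place upgrade the 3.7.0+ code will not see them and rate-limit windows effectively start fresh.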
@@ -46,6 +51,7 @@ class RedisInteractor:
         :param expiry: amount in seconds for the key to expire in
         :param amount: the number to increment by
         """
+        key = self.prefixed_key(key)
         value = await connection.incrby(key, amount)

         if elastic_expiry or value == amount:

@@ -59,6 +65,7 @@ class RedisInteractor:
         :param key: the key to get the counter value for
         """

+        key = self.prefixed_key(key)
         return int(await connection.get(key) or 0)

     async def _clear(self, key: str, connection: AsyncRedisClient) -> None:

@@ -66,6 +73,7 @@ class RedisInteractor:
         :param key: the key to clear rate limits for
         :param connection: Redis connection
         """
+        key = self.prefixed_key(key)
         await connection.delete([key])

     async def get_moving_window(

@@ -79,6 +87,7 @@ class RedisInteractor:
         :param expiry: expiry of entry
         :return: (start of window, number of acquired entries)
         """
+        key = self.prefixed_key(key)
         timestamp = int(time.time())
         window = await self.lua_moving_window.execute(
             [key], [int(timestamp - expiry), limit]

@@ -101,6 +110,7 @@ class RedisInteractor:
         :param expiry: expiry of the entry
         :param connection: Redis connection
         """
+        key = self.prefixed_key(key)
         timestamp = time.time()
         acquired = await self.lua_acquire_window.execute(
             [key], [timestamp, limit, expiry, amount]

@@ -114,6 +124,7 @@ class RedisInteractor:
         :param connection: Redis connection
         """

+        key = self.prefixed_key(key)
         return int(max(await connection.ttl(key), 0) + time.time())

     async def _check(self, connection: AsyncRedisClient) -> bool:

@@ -148,6 +159,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         self,
         uri: str,
         connection_pool: Optional["coredis.ConnectionPool"] = None,
+        wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:
         """

@@ -156,13 +168,15 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         - ``async+redis://[:password]@host:port``
         - ``async+redis://[:password]@host:port/db``
         - ``async+rediss://[:password]@host:port``
-        - ``async+unix:///path/to/sock`` etc...
+        - ``async+redis+unix:///path/to/sock?db=0`` etc...

         This uri is passed directly to :meth:`coredis.Redis.from_url` with
         the initial ``async`` removed, except for the case of ``async+redis+unix``
         where it is replaced with ``unix``.
         :param connection_pool: if provided, the redis client is initialized with
          the connection pool and any other params passed as :paramref:`options`
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
         :param options: all remaining keyword arguments are passed
          directly to the constructor of :class:`coredis.Redis`
         :raise ConfigurationError: when the redis library is not available

@@ -170,7 +184,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
         uri = uri.replace("async+redis", "redis", 1)
         uri = uri.replace("redis+unix", "unix")

-        super().__init__(uri, **options)
+        super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)

         self.dependency = self.dependencies["coredis"].module


@@ -183,6 +197,12 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):

         self.initialize_storage(uri)

+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependency.RedisError  # type: ignore[no-any-return]
+
     def initialize_storage(self, _uri: str) -> None:
         # all these methods are coroutines, so must be called with await
         self.lua_moving_window = self.storage.register_script(self.SCRIPT_MOVING_WINDOW)

@@ -210,6 +230,7 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):
                 key, expiry, self.storage, elastic_expiry, amount
             )
         else:
+            key = self.prefixed_key(key)
             return cast(
                 int, await self.lua_incr_expire.execute([key], [expiry, amount])
             )

@@ -256,15 +277,16 @@ class RedisStorage(RedisInteractor, Storage, MovingWindowSupport):

     async def reset(self) -> Optional[int]:
         """
-        This function calls a Lua Script to delete keys prefixed with
-        in
+        This function calls a Lua Script to delete keys prefixed with
+        ``self.PREFIX`` in blocks of 5000.

         .. warning:: This operation was designed to be fast, but was not tested
           on a large production based system. Be careful with its usage as it
           could be slow on very large data sets.
         """

-
+        prefix = self.prefixed_key("*")
+        return cast(int, await self.lua_clear_keys.execute([prefix]))


 @versionadded(version="2.1")

@@ -285,7 +307,12 @@ class RedisClusterStorage(RedisStorage):
     }
     "Default options passed to :class:`coredis.RedisCluster`"

-    def __init__(
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
         """
         :param uri: url of the form
          ``async+redis+cluster://[:password]@host:port,host:port``

@@ -309,7 +336,9 @@ class RedisClusterStorage(RedisStorage):
             host, port = loc.split(":")
             cluster_hosts.append({"host": host, "port": int(port)})

-        super(RedisStorage, self).__init__(
+        super(RedisStorage, self).__init__(
+            uri, wrap_exceptions=wrap_exceptions, **options
+        )

         self.dependency = self.dependencies["coredis"].module


@@ -323,15 +352,16 @@ class RedisClusterStorage(RedisStorage):
         """
         Redis Clusters are sharded and deleting across shards
         can't be done atomically. Because of this, this reset loops over all
-        keys that are prefixed with
-        a time.
+        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
+        one at a time.

         .. warning:: This operation was not tested with extremely large data sets.
           On a large production based system, care should be taken with its
           usage as it could be slow on very large data sets
         """

-
+        prefix = self.prefixed_key("*")
+        keys = await self.storage.keys(prefix)
         count = 0
         for key in keys:
             count += await self.storage.delete([key])
{limits-3.6.0 → limits-3.9.0}/limits/errors.py

@@ -17,3 +17,12 @@ class ConcurrentUpdateError(Exception):

     def __init__(self, key: str, attempts: int) -> None:
         super().__init__(f"Unable to update {key} after {attempts} retries")
+
+
+class StorageError(Exception):
+    """
+    Error raised when an error is encountered in a storage
+    """
+
+    def __init__(self, storage_error: Exception) -> None:
+        self.storage_error = storage_error