limits 3.13.0.tar.gz → 3.14.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {limits-3.13.0 → limits-3.14.1}/HISTORY.rst +28 -0
- {limits-3.13.0 → limits-3.14.1}/PKG-INFO +4 -5
- {limits-3.13.0 → limits-3.14.1}/limits/_version.py +3 -3
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/mongodb.py +66 -34
- {limits-3.13.0 → limits-3.14.1}/limits/storage/mongodb.py +16 -3
- limits-3.14.1/limits/storage/redis_cluster.py +105 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/redis_sentinel.py +7 -1
- {limits-3.13.0 → limits-3.14.1}/limits/typing.py +2 -2
- {limits-3.13.0 → limits-3.14.1}/limits/util.py +3 -3
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/PKG-INFO +4 -5
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/requires.txt +2 -3
- {limits-3.13.0 → limits-3.14.1}/requirements/docs.txt +3 -3
- {limits-3.13.0 → limits-3.14.1}/requirements/main.txt +0 -1
- limits-3.14.1/requirements/storage/async-memcached.txt +2 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/test.txt +2 -2
- {limits-3.13.0 → limits-3.14.1}/setup.py +1 -1
- {limits-3.13.0 → limits-3.14.1}/tests/test_storage.py +14 -13
- limits-3.13.0/limits/storage/redis_cluster.py +0 -139
- limits-3.13.0/requirements/storage/async-memcached.txt +0 -2
- {limits-3.13.0 → limits-3.14.1}/CLASSIFIERS +0 -0
- {limits-3.13.0 → limits-3.14.1}/CONTRIBUTIONS.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/LICENSE.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/MANIFEST.in +0 -0
- {limits-3.13.0 → limits-3.14.1}/README.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/Makefile +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/_static/custom.css +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/api.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/async.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/changelog.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/conf.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/custom-storage.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/index.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/installation.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/quickstart.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/storage.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/strategies.rst +0 -0
- {limits-3.13.0 → limits-3.14.1}/doc/source/theme_config.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/__init__.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/__init__.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/__init__.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/base.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/etcd.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/memcached.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/memory.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/storage/redis.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/aio/strategies.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/errors.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/limits.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/py.typed +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/resources/redis/lua_scripts/acquire_moving_window.lua +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/resources/redis/lua_scripts/clear_keys.lua +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/resources/redis/lua_scripts/incr_expire.lua +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/resources/redis/lua_scripts/moving_window.lua +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/__init__.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/base.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/etcd.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/memcached.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/memory.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/redis.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/storage/registry.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/strategies.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits/version.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/SOURCES.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/dependency_links.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/not-zip-safe +0 -0
- {limits-3.13.0 → limits-3.14.1}/limits.egg-info/top_level.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/pyproject.toml +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/ci.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/dev.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/async-etcd.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/async-mongodb.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/async-redis.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/etcd.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/memcached.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/mongodb.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/redis.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/requirements/storage/rediscluster.txt +0 -0
- {limits-3.13.0 → limits-3.14.1}/setup.cfg +0 -0
- {limits-3.13.0 → limits-3.14.1}/tests/test_limit_granularities.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/tests/test_limits.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/tests/test_ratelimit_parser.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/tests/test_strategy.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/tests/test_utils.py +0 -0
- {limits-3.13.0 → limits-3.14.1}/versioneer.py +0 -0
{limits-3.13.0 → limits-3.14.1}/HISTORY.rst

@@ -3,6 +3,32 @@
 Changelog
 =========
 
+v3.14.1
+-------
+Release Date: 2024-11-30
+
+* Chore
+
+  * Fix benchmark artifact upload/download issue during release
+    creation
+
+v3.14.0
+-------
+Release Date: 2024-11-29
+
+* Feature
+
+  * Allow custom collection names in mongodb storage
+
+* Compatibility
+
+  * Add support for python 3.13
+  * Drop support for python 3.8
+
+* Deprecations
+
+  * Remove fallback support to use redis-py-cluster
+
 v3.13.0
 -------
 Release Date: 2024-06-22

@@ -715,6 +741,8 @@ Release Date: 2015-01-08
 
 
 
+
+
 
 
 
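The 3.14.0 feature entry above ("Allow custom collection names in mongodb storage") corresponds to the new ``counter_collection_name`` and ``window_collection_name`` parameters visible in the mongodb storage diffs further down. A minimal sketch of how they might be used; the URI and collection names below are illustrative, not taken from this diff:

    from limits.storage import storage_from_string

    # Hypothetical URI and collection names; the keyword arguments are
    # forwarded to MongoDBStorage.__init__, which gained these options in 3.14.0.
    storage = storage_from_string(
        "mongodb://localhost:27017/",
        database_name="limits",
        counter_collection_name="rate_limit_counters",  # default: "counters"
        window_collection_name="rate_limit_windows",    # default: "windows"
    )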
{limits-3.13.0 → limits-3.14.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: limits
-Version: 3.13.0
+Version: 3.14.1
 Summary: Rate limiting utilities
 Home-page: https://limits.readthedocs.org
 Author: Ali-Akber Saifee

@@ -20,10 +20,9 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 License-File: LICENSE.txt
 Requires-Dist: deprecated>=1.2
-Requires-Dist: importlib_resources>=1.3
 Requires-Dist: packaging<25,>=21
 Requires-Dist: typing_extensions
 Provides-Extra: redis

@@ -40,7 +39,7 @@ Provides-Extra: async-redis
 Requires-Dist: coredis<5,>=3.4.0; extra == "async-redis"
 Provides-Extra: async-memcached
 Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "async-memcached"
-Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "async-memcached"
+Requires-Dist: emcache>=1; (python_version >= "3.11" and python_version < "3.13.0") and extra == "async-memcached"
 Provides-Extra: async-mongodb
 Requires-Dist: motor<4,>=3; extra == "async-mongodb"
 Provides-Extra: async-etcd

@@ -53,7 +52,7 @@ Requires-Dist: pymongo<5,>4.1; extra == "all"
 Requires-Dist: etcd3; extra == "all"
 Requires-Dist: coredis<5,>=3.4.0; extra == "all"
 Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "all"
-Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "all"
+Requires-Dist: emcache>=1; (python_version >= "3.11" and python_version < "3.13.0") and extra == "all"
 Requires-Dist: motor<4,>=3; extra == "all"
 Requires-Dist: aetcd; extra == "all"
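The tightened emcache markers above pair with the new python 3.13 support: the async-memcached extra is only installed on pythons below 3.13. A small sketch, assuming :pypi:`packaging` is installed, of how such an environment marker evaluates (the environment values are supplied explicitly here):

    from packaging.markers import Marker

    marker = Marker(
        '(python_version >= "3.11" and python_version < "3.13.0") '
        'and extra == "async-memcached"'
    )
    # True on 3.12, False on 3.13
    print(marker.evaluate({"python_version": "3.12", "extra": "async-memcached"}))
    print(marker.evaluate({"python_version": "3.13", "extra": "async-memcached"}))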
{limits-3.13.0 → limits-3.14.1}/limits/_version.py

@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
-    "date": "2024-
+    "date": "2024-11-30T11:04:11-0800",
     "dirty": false,
     "error": null,
-    "full-revisionid": "
-    "version": "3.13.0"
+    "full-revisionid": "0671723f54aed5692d4c9d9b47cf0326d5263de5",
+    "version": "3.14.1"
 }
 '''  # END VERSION_JSON
 
{limits-3.13.0 → limits-3.14.1}/limits/aio/storage/mongodb.py

@@ -6,7 +6,7 @@ import datetime
 import time
 from typing import Any, cast
 
-from deprecated.sphinx import versionadded
+from deprecated.sphinx import versionadded, versionchanged
 
 from limits.aio.storage.base import MovingWindowSupport, Storage
 from limits.typing import Dict, Optional, ParamSpec, Tuple, Type, TypeVar, Union

@@ -17,6 +17,10 @@ R = TypeVar("R")
 
 
 @versionadded(version="2.1")
+@versionchanged(
+    version="3.14.0",
+    reason="Added option to select custom collection names for windows & counters",
+)
 class MongoDBStorage(Storage, MovingWindowSupport):
     """
     Rate limit storage with MongoDB as backend.

@@ -35,6 +39,8 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self,
         uri: str,
         database_name: str = "limits",
+        counter_collection_name: str = "counters",
+        window_collection_name: str = "windows",
         wrap_exceptions: bool = False,
         **options: Union[float, str, bool],
     ) -> None:

@@ -43,6 +49,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
          This uri is passed directly to :class:`~motor.motor_asyncio.AsyncIOMotorClient`
        :param database_name: The database to use for storing the rate limit
         collections.
+       :param counter_collection_name: The collection name to use for individual counters
+        used in fixed window strategies
+       :param window_collection_name: The collection name to use for moving window storage
        :param wrap_exceptions: Whether to wrap storage exceptions in
         :exc:`limits.errors.StorageError` before raising it.
        :param options: all remaining keyword arguments are passed

@@ -66,6 +75,10 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         self.storage.get_io_loop = asyncio.get_running_loop
 
         self.__database_name = database_name
+        self.__collection_mapping = {
+            "counters": counter_collection_name,
+            "windows": window_collection_name,
+        }
         self.__indices_created = False
 
     @property

@@ -81,8 +94,12 @@ class MongoDBStorage(Storage, MovingWindowSupport):
     async def create_indices(self) -> None:
         if not self.__indices_created:
             await asyncio.gather(
-                self.database.counters.create_index(
-
+                self.database[self.__collection_mapping["counters"]].create_index(
+                    "expireAt", expireAfterSeconds=0
+                ),
+                self.database[self.__collection_mapping["windows"]].create_index(
+                    "expireAt", expireAfterSeconds=0
+                ),
             )
             self.__indices_created = True
 

@@ -92,12 +109,15 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         """
         num_keys = sum(
             await asyncio.gather(
-                self.database.counters.count_documents(
-
+                self.database[self.__collection_mapping["counters"]].count_documents(
+                    {}
+                ),
+                self.database[self.__collection_mapping["windows"]].count_documents({}),
             )
         )
         await asyncio.gather(
-            self.database.counters.drop(),
+            self.database[self.__collection_mapping["counters"]].drop(),
+            self.database[self.__collection_mapping["windows"]].drop(),
         )
 
         return cast(int, num_keys)

@@ -107,15 +127,21 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         :param key: the key to clear rate limits for
         """
         await asyncio.gather(
-            self.database.counters.find_one_and_delete(
-
+            self.database[self.__collection_mapping["counters"]].find_one_and_delete(
+                {"_id": key}
+            ),
+            self.database[self.__collection_mapping["windows"]].find_one_and_delete(
+                {"_id": key}
+            ),
         )
 
     async def get_expiry(self, key: str) -> int:
         """
         :param key: the key to get the expiry for
         """
-        counter = await self.database.counters.find_one(
+        counter = await self.database[self.__collection_mapping["counters"]].find_one(
+            {"_id": key}
+        )
         expiry = (
             counter["expireAt"]
             if counter

@@ -128,7 +154,7 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         """
         :param key: the key to get the counter value for
         """
-        counter = await self.database.counters.find_one(
+        counter = await self.database[self.__collection_mapping["counters"]].find_one(
             {
                 "_id": key,
                 "expireAt": {"$gte": datetime.datetime.now(datetime.timezone.utc)},

@@ -156,7 +182,9 @@ class MongoDBStorage(Storage, MovingWindowSupport):
             seconds=expiry
         )
 
-        response = await self.database
+        response = await self.database[
+            self.__collection_mapping["counters"]
+        ].find_one_and_update(
             {"_id": key},
             [
                 {

@@ -209,30 +237,34 @@ class MongoDBStorage(Storage, MovingWindowSupport):
         :return: (start of window, number of acquired entries)
         """
         timestamp = time.time()
-        result =
-        [
-
-
-        "$
-
-
-
-        "
-
+        result = (
+            await self.database[self.__collection_mapping["windows"]]
+            .aggregate(
+                [
+                    {"$match": {"_id": key}},
+                    {
+                        "$project": {
+                            "entries": {
+                                "$filter": {
+                                    "input": "$entries",
+                                    "as": "entry",
+                                    "cond": {"$gte": ["$$entry", timestamp - expiry]},
+                                }
                             }
                         }
-        }
-
-
-
-
-
-
-        }
-
-
-
+                    },
+                    {"$unwind": "$entries"},
+                    {
+                        "$group": {
+                            "_id": "$_id",
+                            "min": {"$min": "$entries"},
+                            "count": {"$sum": 1},
+                        }
+                    },
+                ]
+            )
+            .to_list(length=1)
+        )
 
         if result:
             return (int(result[0]["min"]), result[0]["count"])

@@ -266,7 +298,7 @@ class MongoDBStorage(Storage, MovingWindowSupport):
             )
         }
         updates["$push"]["entries"]["$each"] = [timestamp] * amount
-        await self.database.windows.update_one(
+        await self.database[self.__collection_mapping["windows"]].update_one(
             {
                 "_id": key,
                 "entries.%d" % (limit - amount): {
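Both collections are now resolved through the configured collection mapping, and each gets the same TTL index on ``expireAt``. A standalone sketch of that index behaviour using :pypi:`pymongo` directly; the database, collection, and key names here are illustrative and a local MongoDB is assumed:

    import datetime
    import pymongo

    client = pymongo.MongoClient("mongodb://localhost:27017/")
    counters = client["limits"]["rate_limit_counters"]

    # Documents whose "expireAt" lies in the past are removed by MongoDB's TTL monitor.
    counters.create_index("expireAt", expireAfterSeconds=0)
    counters.insert_one(
        {
            "_id": "LIMITER/default/my-key/1/minute",  # hypothetical counter key
            "count": 1,
            "expireAt": datetime.datetime.now(datetime.timezone.utc)
            + datetime.timedelta(seconds=60),
        }
    )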
{limits-3.13.0 → limits-3.14.1}/limits/storage/mongodb.py

@@ -6,7 +6,7 @@ import time
 from abc import ABC, abstractmethod
 from typing import Any, cast
 
-from deprecated.sphinx import versionadded
+from deprecated.sphinx import versionadded, versionchanged
 
 from limits.typing import (
     Dict,

@@ -36,6 +36,8 @@ class MongoDBStorageBase(Storage, MovingWindowSupport, ABC):
         self,
         uri: str,
         database_name: str = "limits",
+        counter_collection_name: str = "counters",
+        window_collection_name: str = "windows",
         wrap_exceptions: bool = False,
         **options: Union[int, str, bool],
     ) -> None:

@@ -44,6 +46,9 @@ class MongoDBStorageBase(Storage, MovingWindowSupport, ABC):
          This uri is passed directly to :class:`~pymongo.mongo_client.MongoClient`
        :param database_name: The database to use for storing the rate limit
         collections.
+       :param counter_collection_name: The collection name to use for individual counters
+        used in fixed window strategies
+       :param window_collection_name: The collection name to use for moving window storage
        :param wrap_exceptions: Whether to wrap storage exceptions in
         :exc:`limits.errors.StorageError` before raising it.
        :param options: all remaining keyword arguments are passed to the

@@ -53,6 +58,10 @@ class MongoDBStorageBase(Storage, MovingWindowSupport, ABC):
 
         super().__init__(uri, wrap_exceptions=wrap_exceptions, **options)
         self._database_name = database_name
+        self._collection_mapping = {
+            "counters": counter_collection_name,
+            "windows": window_collection_name,
+        }
         self.lib = self.dependencies["pymongo"].module
         self.lib_errors, _ = get_dependency("pymongo.errors")
         self._storage_uri = uri

@@ -74,11 +83,11 @@ class MongoDBStorageBase(Storage, MovingWindowSupport, ABC):
 
     @property
     def counters(self) -> MongoCollection:
-        return self._database["counters"]
+        return self._database[self._collection_mapping["counters"]]
 
     @property
     def windows(self) -> MongoCollection:
-        return self._database["windows"]
+        return self._database[self._collection_mapping["windows"]]
 
     @abstractmethod
     def _init_mongo_client(

@@ -278,6 +287,10 @@ class MongoDBStorageBase(Storage, MovingWindowSupport, ABC):
 
 
 @versionadded(version="2.1")
+@versionchanged(
+    version="3.14.0",
+    reason="Added option to select custom collection names for windows & counters",
+)
 class MongoDBStorage(MongoDBStorageBase):
     STORAGE_SCHEME = ["mongodb", "mongodb+srv"]
 
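With the mapping in place, the synchronous storage's ``counters`` and ``windows`` properties resolve to whatever collection names were configured. A short sketch; the port and collection names are illustrative, and a reachable MongoDB is only needed once operations are issued:

    from limits.storage import MongoDBStorage

    storage = MongoDBStorage(
        "mongodb://localhost:37017/",
        counter_collection_name="fixed_window_counters",
        window_collection_name="moving_window_entries",
    )
    print(storage.counters.name)  # "fixed_window_counters"
    print(storage.windows.name)   # "moving_window_entries"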
limits-3.14.1/limits/storage/redis_cluster.py (added)

@@ -0,0 +1,105 @@
+import urllib
+
+from deprecated.sphinx import versionchanged
+from packaging.version import Version
+
+from limits.storage.redis import RedisStorage
+from limits.typing import Dict, Optional, Union
+
+
+@versionchanged(
+    version="3.14.0",
+    reason="""
+    Dropped support for the :pypi:`redis-py-cluster` library
+    which has been abandoned/deprecated.
+    """,
+)
+@versionchanged(
+    version="2.5.0",
+    reason="""
+    Cluster support was provided by the :pypi:`redis-py-cluster` library
+    which has been absorbed into the official :pypi:`redis` client. By
+    default the :class:`redis.cluster.RedisCluster` client will be used
+    however if the version of the package is lower than ``4.2.0`` the implementation
+    will fallback to trying to use :class:`rediscluster.RedisCluster`.
+    """,
+)
+class RedisClusterStorage(RedisStorage):
+    """
+    Rate limit storage with redis cluster as backend
+
+    Depends on :pypi:`redis`.
+    """
+
+    STORAGE_SCHEME = ["redis+cluster"]
+    """The storage scheme for redis cluster"""
+
+    DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = {
+        "max_connections": 1000,
+    }
+    "Default options passed to the :class:`~redis.cluster.RedisCluster`"
+
+    DEPENDENCIES = {
+        "redis": Version("4.2.0"),
+    }
+
+    def __init__(
+        self,
+        uri: str,
+        wrap_exceptions: bool = False,
+        **options: Union[float, str, bool],
+    ) -> None:
+        """
+        :param uri: url of the form
+         ``redis+cluster://[:password]@host:port,host:port``
+        :param wrap_exceptions: Whether to wrap storage exceptions in
+         :exc:`limits.errors.StorageError` before raising it.
+        :param options: all remaining keyword arguments are passed
+         directly to the constructor of :class:`redis.cluster.RedisCluster`
+        :raise ConfigurationError: when the :pypi:`redis` library is not
+         available or if the redis cluster cannot be reached.
+        """
+        parsed = urllib.parse.urlparse(uri)
+        parsed_auth: Dict[str, Union[float, str, bool]] = {}
+
+        if parsed.username:
+            parsed_auth["username"] = parsed.username
+        if parsed.password:
+            parsed_auth["password"] = parsed.password
+
+        sep = parsed.netloc.find("@") + 1
+        cluster_hosts = []
+        for loc in parsed.netloc[sep:].split(","):
+            host, port = loc.split(":")
+            cluster_hosts.append((host, int(port)))
+
+        self.storage = None
+        merged_options = {**self.DEFAULT_OPTIONS, **parsed_auth, **options}
+        self.dependency = self.dependencies["redis"].module
+        startup_nodes = [self.dependency.cluster.ClusterNode(*c) for c in cluster_hosts]
+        self.storage = self.dependency.cluster.RedisCluster(
+            startup_nodes=startup_nodes, **merged_options
+        )
+        assert self.storage
+        self.initialize_storage(uri)
+        super(RedisStorage, self).__init__(uri, wrap_exceptions, **options)
+
+    def reset(self) -> Optional[int]:
+        """
+        Redis Clusters are sharded and deleting across shards
+        can't be done atomically. Because of this, this reset loops over all
+        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
+        one at a time.
+
+        .. warning::
+         This operation was not tested with extremely large data sets.
+         On a large production based system, care should be taken with its
+         usage as it could be slow on very large data sets"""
+
+        prefix = self.prefixed_key("*")
+        count = 0
+        for primary in self.storage.get_primaries():
+            node = self.storage.get_redis_connection(primary)
+            keys = node.keys(prefix)
+            count += sum([node.delete(k.decode("utf-8")) for k in keys])
+        return count
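The rewritten module keeps the ``redis+cluster://`` scheme and the public behaviour but now requires :pypi:`redis` >= 4.2.0, with the :pypi:`redis-py-cluster` fallback removed. A usage sketch, assuming a reachable cluster at the illustrative hosts below:

    from limits import RateLimitItemPerMinute
    from limits.storage import RedisClusterStorage
    from limits.strategies import FixedWindowRateLimiter

    storage = RedisClusterStorage(
        "redis+cluster://:sekret@localhost:7001,localhost:7002",
        max_connections=500,  # overrides the DEFAULT_OPTIONS value of 1000
    )
    limiter = FixedWindowRateLimiter(storage)
    per_minute = RateLimitItemPerMinute(10)

    if not limiter.hit(per_minute, "my-service", "client-42"):
        print("rate limited")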
{limits-3.13.0 → limits-3.14.1}/limits/storage/redis_sentinel.py

@@ -5,7 +5,7 @@ from packaging.version import Version
 
 from limits.errors import ConfigurationError
 from limits.storage.redis import RedisStorage
-from limits.typing import Dict, Optional, Union
+from limits.typing import Dict, Optional, Tuple, Type, Union
 
 if TYPE_CHECKING:
     import redis.sentinel

@@ -86,6 +86,12 @@ class RedisSentinelStorage(RedisStorage):
         self.use_replicas = use_replicas
         self.initialize_storage(uri)
 
+    @property
+    def base_exceptions(
+        self,
+    ) -> Union[Type[Exception], Tuple[Type[Exception], ...]]:  # pragma: no cover
+        return self.dependencies["redis"].RedisError  # type: ignore[no-any-return, attr-defined]
+
     def get(self, key: str) -> int:
         """
         :param key: the key to get the counter value for
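The added ``base_exceptions`` property tells the base storage which exceptions count as storage failures, which matters when exception wrapping is enabled. A rough sketch; the host, service name, and key are illustrative, and the constructor arguments are assumed to match the current ``RedisSentinelStorage`` signature:

    from limits.errors import StorageError
    from limits.storage import RedisSentinelStorage

    storage = RedisSentinelStorage(
        "redis+sentinel://localhost:26379",
        service_name="mymaster",
        wrap_exceptions=True,
    )
    try:
        # any underlying redis error here is re-raised as StorageError
        storage.get("LIMITER/test")
    except StorageError as exc:
        print("storage failure:", exc)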
{limits-3.13.0 → limits-3.14.1}/limits/typing.py

@@ -110,8 +110,8 @@ class ScriptP(Protocol[R_co]):
 
 
 MongoClient: TypeAlias = "pymongo.MongoClient[Dict[str, Any]]"  # type:ignore[misc]
-MongoDatabase: TypeAlias = "pymongo.database.Database[Dict[str, Any]]"  # type:ignore
-MongoCollection: TypeAlias = "pymongo.collection.Collection[Dict[str, Any]]"  # type:ignore
+MongoDatabase: TypeAlias = "pymongo.database.Database[Dict[str, Any]]"  # type:ignore
+MongoCollection: TypeAlias = "pymongo.collection.Collection[Dict[str, Any]]"  # type:ignore
 
 __all__ = [
     "AsyncRedisClient",
{limits-3.13.0 → limits-3.14.1}/limits/util.py

@@ -1,13 +1,13 @@
 """ """
 
 import dataclasses
+import importlib.resources
 import re
 import sys
 from collections import UserDict
 from types import ModuleType
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING
 
-import importlib_resources
 from packaging.version import Version
 
 from limits.typing import Dict, List, NamedTuple, Optional, Tuple, Type, Union

@@ -142,7 +142,7 @@ def get_dependency(module_path: str) -> Tuple[Optional[ModuleType], Optional[Ver
 
 
 def get_package_data(path: str) -> bytes:
-    return
+    return importlib.resources.files("limits").joinpath(path).read_bytes()
 
 
 def parse_many(limit_string: str) -> List[RateLimitItem]:
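``get_package_data`` now reads packaged files through the standard library instead of the :pypi:`importlib_resources` backport, which is why the backport disappears from the requirements below. A sketch of the equivalent stdlib call against one of the bundled lua scripts (requires python >= 3.9 for ``importlib.resources.files``):

    import importlib.resources

    script = (
        importlib.resources.files("limits")
        .joinpath("resources/redis/lua_scripts/incr_expire.lua")
        .read_bytes()
    )
    print(script.splitlines()[0])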
{limits-3.13.0 → limits-3.14.1}/limits.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: limits
-Version: 3.13.0
+Version: 3.14.1
 Summary: Rate limiting utilities
 Home-page: https://limits.readthedocs.org
 Author: Ali-Akber Saifee

@@ -20,10 +20,9 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 License-File: LICENSE.txt
 Requires-Dist: deprecated>=1.2
-Requires-Dist: importlib_resources>=1.3
 Requires-Dist: packaging<25,>=21
 Requires-Dist: typing_extensions
 Provides-Extra: redis

@@ -40,7 +39,7 @@ Provides-Extra: async-redis
 Requires-Dist: coredis<5,>=3.4.0; extra == "async-redis"
 Provides-Extra: async-memcached
 Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "async-memcached"
-Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "async-memcached"
+Requires-Dist: emcache>=1; (python_version >= "3.11" and python_version < "3.13.0") and extra == "async-memcached"
 Provides-Extra: async-mongodb
 Requires-Dist: motor<4,>=3; extra == "async-mongodb"
 Provides-Extra: async-etcd

@@ -53,7 +52,7 @@ Requires-Dist: pymongo<5,>4.1; extra == "all"
 Requires-Dist: etcd3; extra == "all"
 Requires-Dist: coredis<5,>=3.4.0; extra == "all"
 Requires-Dist: emcache>=0.6.1; python_version < "3.11" and extra == "all"
-Requires-Dist: emcache>=1; python_version >= "3.11" and extra == "all"
+Requires-Dist: emcache>=1; (python_version >= "3.11" and python_version < "3.13.0") and extra == "all"
 Requires-Dist: motor<4,>=3; extra == "all"
 Requires-Dist: aetcd; extra == "all"
{limits-3.13.0 → limits-3.14.1}/limits.egg-info/requires.txt

@@ -1,5 +1,4 @@
 deprecated>=1.2
-importlib_resources>=1.3
 packaging<25,>=21
 typing_extensions
 

@@ -16,7 +15,7 @@ aetcd
 [all:python_version < "3.11"]
 emcache>=0.6.1
 
-[all:python_version >= "3.11"]
+[all:python_version >= "3.11" and python_version < "3.13.0"]
 emcache>=1
 
 [async-etcd]

@@ -27,7 +26,7 @@ aetcd
 [async-memcached:python_version < "3.11"]
 emcache>=0.6.1
 
-[async-memcached:python_version >= "3.11"]
+[async-memcached:python_version >= "3.11" and python_version < "3.13.0"]
 emcache>=1
 
 [async-mongodb]
{limits-3.13.0 → limits-3.14.1}/setup.py

@@ -58,7 +58,7 @@ setup(
     description="Rate limiting utilities",
     long_description=open("README.rst").read(),
     packages=find_packages(exclude=["tests*"]),
-    python_requires=">=3.8",
+    python_requires=">=3.9",
     extras_require=EXTRA_REQUIREMENTS,
     include_package_data=True,
     package_data={
{limits-3.13.0 → limits-3.14.1}/tests/test_storage.py

@@ -1,6 +1,7 @@
 import time
 
 import pytest
+from pytest_lazy_fixtures import lf
 
 from limits import RateLimitItemPerMinute, RateLimitItemPerSecond
 from limits.errors import ConfigurationError, StorageError

@@ -104,7 +105,7 @@ class TestBaseStorage:
             "redis://localhost:7379",
             {},
             RedisStorage,
-
+            lf("redis_basic"),
             marks=pytest.mark.redis,
             id="redis",
         ),

@@ -112,7 +113,7 @@ class TestBaseStorage:
             "redis+unix:///tmp/limits.redis.sock",
             {},
             RedisStorage,
-
+            lf("redis_uds"),
             marks=pytest.mark.redis,
             id="redis-uds",
         ),

@@ -120,7 +121,7 @@ class TestBaseStorage:
             "redis+unix://:password/tmp/limits.redis.sock",
             {},
             RedisStorage,
-
+            lf("redis_uds"),
             marks=pytest.mark.redis,
             id="redis-uds-auth",
         ),

@@ -128,7 +129,7 @@ class TestBaseStorage:
             "memcached://localhost:22122",
             {},
             MemcachedStorage,
-
+            lf("memcached"),
             marks=pytest.mark.memcached,
             id="memcached",
         ),

@@ -136,7 +137,7 @@ class TestBaseStorage:
             "memcached://localhost:22122,localhost:22123",
             {},
             MemcachedStorage,
-
+            lf("memcached_cluster"),
             marks=pytest.mark.memcached,
             id="memcached-cluster",
         ),

@@ -144,7 +145,7 @@ class TestBaseStorage:
             "memcached:///tmp/limits.memcached.sock",
             {},
             MemcachedStorage,
-
+            lf("memcached_uds"),
             marks=pytest.mark.memcached,
             id="memcached-uds",
         ),

@@ -152,7 +153,7 @@ class TestBaseStorage:
             "redis+sentinel://localhost:26379",
             {"service_name": "mymaster"},
             RedisSentinelStorage,
-
+            lf("redis_sentinel"),
             marks=pytest.mark.redis_sentinel,
             id="redis-sentinel",
         ),

@@ -160,7 +161,7 @@ class TestBaseStorage:
             "redis+sentinel://localhost:26379/mymaster",
             {},
             RedisSentinelStorage,
-
+            lf("redis_sentinel"),
             marks=pytest.mark.redis_sentinel,
             id="redis-sentinel-service-name-url",
         ),

@@ -168,7 +169,7 @@ class TestBaseStorage:
             "redis+sentinel://:sekret@localhost:36379/mymaster",
             {"password": "sekret"},
             RedisSentinelStorage,
-
+            lf("redis_sentinel_auth"),
             marks=pytest.mark.redis_sentinel,
             id="redis-sentinel-auth",
         ),

@@ -176,7 +177,7 @@ class TestBaseStorage:
             "redis+cluster://localhost:7001/",
             {},
             RedisClusterStorage,
-
+            lf("redis_cluster"),
             marks=pytest.mark.redis_cluster,
             id="redis-cluster",
         ),

@@ -184,7 +185,7 @@ class TestBaseStorage:
             "redis+cluster://:sekret@localhost:8400/",
             {},
             RedisClusterStorage,
-
+            lf("redis_auth_cluster"),
             marks=pytest.mark.redis_cluster,
             id="redis-cluster-auth",
         ),

@@ -192,7 +193,7 @@ class TestBaseStorage:
             "mongodb://localhost:37017/",
             {},
             MongoDBStorage,
-
+            lf("mongodb"),
             marks=pytest.mark.mongodb,
             id="mongodb",
         ),

@@ -200,7 +201,7 @@ class TestBaseStorage:
             "etcd://localhost:2379",
             {},
             EtcdStorage,
-
+            lf("etcd"),
             marks=pytest.mark.etcd,
             id="etcd",
        ),
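The tests now take their storage fixtures through ``lf`` from :pypi:`pytest-lazy-fixtures`, which defers fixture resolution until each parametrized case actually runs. A self-contained sketch of the pattern; the fixture below is a stand-in, not the project's container-backed one:

    import pytest
    from pytest_lazy_fixtures import lf


    @pytest.fixture
    def redis_basic():
        # stand-in fixture; the real one provisions a redis instance
        return "redis://localhost:7379"


    @pytest.mark.parametrize(
        "uri, fixture",
        [("redis://localhost:7379", lf("redis_basic"))],
    )
    def test_fixture_is_resolved(uri, fixture):
        assert fixture == uri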
limits-3.13.0/limits/storage/redis_cluster.py (removed)

@@ -1,139 +0,0 @@
-import urllib
-import warnings
-from typing import cast
-
-from deprecated.sphinx import versionchanged
-from packaging.version import Version
-
-from limits.errors import ConfigurationError
-from limits.storage.redis import RedisStorage
-from limits.typing import Dict, List, Optional, Tuple, Union
-
-
-@versionchanged(
-    version="2.5.0",
-    reason="""
-    Cluster support was provided by the :pypi:`redis-py-cluster` library
-    which has been absorbed into the official :pypi:`redis` client. By
-    default the :class:`redis.cluster.RedisCluster` client will be used
-    however if the version of the package is lower than ``4.2.0`` the implementation
-    will fallback to trying to use :class:`rediscluster.RedisCluster`.
-    """,
-)
-class RedisClusterStorage(RedisStorage):
-    """
-    Rate limit storage with redis cluster as backend
-
-    Depends on :pypi:`redis`.
-    """
-
-    STORAGE_SCHEME = ["redis+cluster"]
-    """The storage scheme for redis cluster"""
-
-    DEFAULT_OPTIONS: Dict[str, Union[float, str, bool]] = {
-        "max_connections": 1000,
-    }
-    "Default options passed to the :class:`~redis.cluster.RedisCluster`"
-
-    DEPENDENCIES = {
-        "redis": Version("4.2.0"),
-        "rediscluster": Version("2.0.0"),  # Deprecated since 2.6.0
-    }
-
-    def __init__(self, uri: str, **options: Union[float, str, bool]) -> None:
-        """
-        :param uri: url of the form
-         ``redis+cluster://[:password]@host:port,host:port``
-        :param options: all remaining keyword arguments are passed
-         directly to the constructor of :class:`redis.cluster.RedisCluster`
-        :raise ConfigurationError: when the :pypi:`redis` library is not
-         available or if the redis cluster cannot be reached.
-        """
-        parsed = urllib.parse.urlparse(uri)
-        parsed_auth: Dict[str, Union[float, str, bool]] = {}
-
-        if parsed.username:
-            parsed_auth["username"] = parsed.username
-        if parsed.password:
-            parsed_auth["password"] = parsed.password
-
-        sep = parsed.netloc.find("@") + 1
-        cluster_hosts = []
-        for loc in parsed.netloc[sep:].split(","):
-            host, port = loc.split(":")
-            cluster_hosts.append((host, int(port)))
-
-        self.storage = None
-        self.using_redis_py = False
-        self.__pick_storage(
-            cluster_hosts, **{**self.DEFAULT_OPTIONS, **parsed_auth, **options}
-        )
-        assert self.storage
-        self.initialize_storage(uri)
-        super(RedisStorage, self).__init__(uri, **options)
-
-    def __pick_storage(
-        self, cluster_hosts: List[Tuple[str, int]], **options: Union[float, str, bool]
-    ) -> None:
-        try:
-            redis_py = self.dependencies["redis"].module
-            startup_nodes = [redis_py.cluster.ClusterNode(*c) for c in cluster_hosts]
-            self.storage = redis_py.cluster.RedisCluster(
-                startup_nodes=startup_nodes, **options
-            )
-            self.using_redis_py = True
-            return
-        except ConfigurationError:  # pragma: no cover
-            self.__use_legacy_cluster_implementation(cluster_hosts, **options)
-        if not self.storage:
-            raise ConfigurationError(
-                (
-                    "Unable to find an implementation for redis cluster"
-                    " Cluster support requires either redis-py>=4.2 or"
-                    " redis-py-cluster"
-                )
-            )
-
-    def __use_legacy_cluster_implementation(
-        self, cluster_hosts: List[Tuple[str, int]], **options: Union[float, str, bool]
-    ) -> None:  # pragma: no cover
-        redis_cluster = self.dependencies["rediscluster"].module
-        warnings.warn(
-            (
-                "Using redis-py-cluster is deprecated as the library has been"
-                " absorbed by redis-py (>=4.2). The support will be eventually "
-                " removed from the limits library and is no longer tested "
-                " against since version: 2.6. To get rid of this warning, "
-                " uninstall redis-py-cluster and ensure redis-py>=4.2.0 is installed"
-            )
-        )
-        self.storage = redis_cluster.RedisCluster(
-            startup_nodes=[{"host": c[0], "port": c[1]} for c in cluster_hosts],
-            **options,
-        )
-
-    def reset(self) -> Optional[int]:
-        """
-        Redis Clusters are sharded and deleting across shards
-        can't be done atomically. Because of this, this reset loops over all
-        keys that are prefixed with ``self.PREFIX`` and calls delete on them,
-        one at a time.
-
-        .. warning::
-         This operation was not tested with extremely large data sets.
-         On a large production based system, care should be taken with its
-         usage as it could be slow on very large data sets"""
-
-        prefix = self.prefixed_key("*")
-        if self.using_redis_py:
-            count = 0
-            for primary in self.storage.get_primaries():
-                node = self.storage.get_redis_connection(primary)
-                keys = node.keys(prefix)
-                count += sum([node.delete(k.decode("utf-8")) for k in keys])
-            return count
-        else:  # pragma: no cover
-            keys = self.storage.keys(prefix)
-            return cast(
-                int, sum([self.storage.delete(k.decode("utf-8")) for k in keys])
-            )
All remaining files listed above (+0 -0) are unchanged between limits-3.13.0 and limits-3.14.1.