everysk-lib 1.10.2__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- everysk/__init__.py +30 -0
- everysk/_version.py +683 -0
- everysk/api/__init__.py +61 -0
- everysk/api/api_requestor.py +167 -0
- everysk/api/api_resources/__init__.py +23 -0
- everysk/api/api_resources/api_resource.py +371 -0
- everysk/api/api_resources/calculation.py +779 -0
- everysk/api/api_resources/custom_index.py +42 -0
- everysk/api/api_resources/datastore.py +81 -0
- everysk/api/api_resources/file.py +42 -0
- everysk/api/api_resources/market_data.py +223 -0
- everysk/api/api_resources/parser.py +66 -0
- everysk/api/api_resources/portfolio.py +43 -0
- everysk/api/api_resources/private_security.py +42 -0
- everysk/api/api_resources/report.py +65 -0
- everysk/api/api_resources/report_template.py +39 -0
- everysk/api/api_resources/tests.py +115 -0
- everysk/api/api_resources/worker_execution.py +64 -0
- everysk/api/api_resources/workflow.py +65 -0
- everysk/api/api_resources/workflow_execution.py +93 -0
- everysk/api/api_resources/workspace.py +42 -0
- everysk/api/http_client.py +63 -0
- everysk/api/tests.py +32 -0
- everysk/api/utils.py +262 -0
- everysk/config.py +451 -0
- everysk/core/_tests/serialize/test_json.py +336 -0
- everysk/core/_tests/serialize/test_orjson.py +295 -0
- everysk/core/_tests/serialize/test_pickle.py +48 -0
- everysk/core/cloud_function/main.py +78 -0
- everysk/core/cloud_function/tests.py +86 -0
- everysk/core/compress.py +245 -0
- everysk/core/datetime/__init__.py +12 -0
- everysk/core/datetime/calendar.py +144 -0
- everysk/core/datetime/date.py +424 -0
- everysk/core/datetime/date_expression.py +299 -0
- everysk/core/datetime/date_mixin.py +1475 -0
- everysk/core/datetime/date_settings.py +30 -0
- everysk/core/datetime/datetime.py +713 -0
- everysk/core/exceptions.py +435 -0
- everysk/core/fields.py +1176 -0
- everysk/core/firestore.py +555 -0
- everysk/core/fixtures/_settings.py +29 -0
- everysk/core/fixtures/other/_settings.py +18 -0
- everysk/core/fixtures/user_agents.json +88 -0
- everysk/core/http.py +691 -0
- everysk/core/lists.py +92 -0
- everysk/core/log.py +709 -0
- everysk/core/number.py +37 -0
- everysk/core/object.py +1469 -0
- everysk/core/redis.py +1021 -0
- everysk/core/retry.py +51 -0
- everysk/core/serialize.py +674 -0
- everysk/core/sftp.py +414 -0
- everysk/core/signing.py +53 -0
- everysk/core/slack.py +127 -0
- everysk/core/string.py +199 -0
- everysk/core/tests.py +240 -0
- everysk/core/threads.py +199 -0
- everysk/core/undefined.py +70 -0
- everysk/core/unittests.py +73 -0
- everysk/core/workers.py +241 -0
- everysk/sdk/__init__.py +23 -0
- everysk/sdk/base.py +98 -0
- everysk/sdk/brutils/cnpj.py +391 -0
- everysk/sdk/brutils/cnpj_pd.py +129 -0
- everysk/sdk/engines/__init__.py +26 -0
- everysk/sdk/engines/cache.py +185 -0
- everysk/sdk/engines/compliance.py +37 -0
- everysk/sdk/engines/cryptography.py +69 -0
- everysk/sdk/engines/expression.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/expression.pyi +55 -0
- everysk/sdk/engines/helpers.cp312-win_amd64.pyd +0 -0
- everysk/sdk/engines/helpers.pyi +26 -0
- everysk/sdk/engines/lock.py +120 -0
- everysk/sdk/engines/market_data.py +244 -0
- everysk/sdk/engines/settings.py +19 -0
- everysk/sdk/entities/__init__.py +23 -0
- everysk/sdk/entities/base.py +784 -0
- everysk/sdk/entities/base_list.py +131 -0
- everysk/sdk/entities/custom_index/base.py +209 -0
- everysk/sdk/entities/custom_index/settings.py +29 -0
- everysk/sdk/entities/datastore/base.py +160 -0
- everysk/sdk/entities/datastore/settings.py +17 -0
- everysk/sdk/entities/fields.py +375 -0
- everysk/sdk/entities/file/base.py +215 -0
- everysk/sdk/entities/file/settings.py +63 -0
- everysk/sdk/entities/portfolio/base.py +248 -0
- everysk/sdk/entities/portfolio/securities.py +241 -0
- everysk/sdk/entities/portfolio/security.py +580 -0
- everysk/sdk/entities/portfolio/settings.py +97 -0
- everysk/sdk/entities/private_security/base.py +226 -0
- everysk/sdk/entities/private_security/settings.py +17 -0
- everysk/sdk/entities/query.py +603 -0
- everysk/sdk/entities/report/base.py +214 -0
- everysk/sdk/entities/report/settings.py +23 -0
- everysk/sdk/entities/script.py +310 -0
- everysk/sdk/entities/secrets/base.py +128 -0
- everysk/sdk/entities/secrets/script.py +119 -0
- everysk/sdk/entities/secrets/settings.py +17 -0
- everysk/sdk/entities/settings.py +48 -0
- everysk/sdk/entities/tags.py +174 -0
- everysk/sdk/entities/worker_execution/base.py +307 -0
- everysk/sdk/entities/worker_execution/settings.py +63 -0
- everysk/sdk/entities/workflow_execution/base.py +113 -0
- everysk/sdk/entities/workflow_execution/settings.py +32 -0
- everysk/sdk/entities/workspace/base.py +99 -0
- everysk/sdk/entities/workspace/settings.py +27 -0
- everysk/sdk/settings.py +67 -0
- everysk/sdk/tests.py +105 -0
- everysk/sdk/worker_base.py +47 -0
- everysk/server/__init__.py +9 -0
- everysk/server/applications.py +63 -0
- everysk/server/endpoints.py +516 -0
- everysk/server/example_api.py +69 -0
- everysk/server/middlewares.py +80 -0
- everysk/server/requests.py +62 -0
- everysk/server/responses.py +119 -0
- everysk/server/routing.py +64 -0
- everysk/server/settings.py +36 -0
- everysk/server/tests.py +36 -0
- everysk/settings.py +98 -0
- everysk/sql/__init__.py +9 -0
- everysk/sql/connection.py +232 -0
- everysk/sql/model.py +376 -0
- everysk/sql/query.py +417 -0
- everysk/sql/row_factory.py +63 -0
- everysk/sql/settings.py +49 -0
- everysk/sql/utils.py +129 -0
- everysk/tests.py +23 -0
- everysk/utils.py +81 -0
- everysk/version.py +15 -0
- everysk_lib-1.10.2.dist-info/.gitignore +5 -0
- everysk_lib-1.10.2.dist-info/METADATA +326 -0
- everysk_lib-1.10.2.dist-info/RECORD +137 -0
- everysk_lib-1.10.2.dist-info/WHEEL +5 -0
- everysk_lib-1.10.2.dist-info/licenses/LICENSE.txt +9 -0
- everysk_lib-1.10.2.dist-info/top_level.txt +2 -0
everysk/core/redis.py
ADDED
|
@@ -0,0 +1,1021 @@
|
|
|
1
|
+
###############################################################################
|
|
2
|
+
#
|
|
3
|
+
# (C) Copyright 2023 EVERYSK TECHNOLOGIES
|
|
4
|
+
#
|
|
5
|
+
# This is an unpublished work containing confidential and proprietary
|
|
6
|
+
# information of EVERYSK TECHNOLOGIES. Disclosure, use, or reproduction
|
|
7
|
+
# without authorization of EVERYSK TECHNOLOGIES is prohibited.
|
|
8
|
+
#
|
|
9
|
+
###############################################################################
|
|
10
|
+
import socket
|
|
11
|
+
import traceback
|
|
12
|
+
from collections.abc import Callable, Iterable
|
|
13
|
+
from functools import _HashedSeq, wraps
|
|
14
|
+
from hashlib import sha256
|
|
15
|
+
from time import sleep
|
|
16
|
+
from typing import Any
|
|
17
|
+
from uuid import uuid1
|
|
18
|
+
|
|
19
|
+
from redis import Redis, client, exceptions
|
|
20
|
+
from redis.backoff import ExponentialBackoff # pylint: disable=import-error, no-name-in-module
|
|
21
|
+
from redis.lock import Lock # pylint: disable=import-error, no-name-in-module
|
|
22
|
+
from redis.retry import Retry # pylint: disable=import-error, no-name-in-module
|
|
23
|
+
|
|
24
|
+
from everysk.config import settings
|
|
25
|
+
from everysk.core.compress import compress, decompress
|
|
26
|
+
from everysk.core.exceptions import RedisEmptyListError
|
|
27
|
+
from everysk.core.fields import BoolField, FloatField, StrField
|
|
28
|
+
from everysk.core.log import Logger
|
|
29
|
+
from everysk.core.object import BaseObject
|
|
30
|
+
from everysk.core.serialize import dumps, loads
|
|
31
|
+
|
|
32
|
+
# Module-level logger used by every Redis helper in this module.
log = Logger(name='everysk-redis')
# Errors that always trigger the connection retry policy; extra error
# classes can be appended via settings.REDIS_RETRY_EXTRA_ERROR_LIST.
DEFAULT_ERROR_LIST = [exceptions.ConnectionError, exceptions.TimeoutError, socket.timeout]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
###############################################################################
|
|
37
|
+
# Cache decorator functions Implementation
|
|
38
|
+
###############################################################################
|
|
39
|
+
def _make_key(args: tuple, kwargs: dict) -> str:
|
|
40
|
+
"""
|
|
41
|
+
Create a key from args and kwargs to be used on cache.
|
|
42
|
+
This function is a based on functools._make_key
|
|
43
|
+
|
|
44
|
+
Args:
|
|
45
|
+
args (tuple): The received args.
|
|
46
|
+
kwargs (dict): The received kwargs.
|
|
47
|
+
"""
|
|
48
|
+
# /usr/local/lib/python3.11/functools.py: 448 - def _make_key
|
|
49
|
+
key = args
|
|
50
|
+
if kwargs:
|
|
51
|
+
for item in kwargs.items():
|
|
52
|
+
key += item
|
|
53
|
+
elif len(key) == 1 and type(key[0]) in {int, float, str}:
|
|
54
|
+
# To be faster if we have only one item and it's a int, float or str
|
|
55
|
+
# we return it directly
|
|
56
|
+
return str(key[0])
|
|
57
|
+
|
|
58
|
+
return str(_HashedSeq(key))
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def cache(*, timeout: int) -> Callable:
    """
    Decorator that caches a function's result in Redis.

    The result is pickled and stored under a key derived from the call
    arguments. With timeout=None the entry is stored without expiration;
    otherwise timeout must be an integer > 0 (seconds).

    Args:
        timeout (int): The time in seconds that the result will be stored in cache.

    Raises:
        ValueError: If timeout is not an integer or is less than 1.
    """
    invalid_timeout = timeout is not None and (not isinstance(timeout, int) or timeout < 1)
    if invalid_timeout:
        raise ValueError('Timeout must be an integer greater than 0.')

    # The timeout parameter forces an extra nesting level for the real decorator.
    def decorator(func: Callable) -> Callable:
        # Hit/miss counters, exposed to callers through wrapper.info.
        stats = {'hits': 0, 'misses': 0}
        storage = RedisCache(prefix=func.__name__)

        # wraps keeps the original function name and docstring.
        @wraps(func)
        def wrapper(*args, **kwargs) -> Any:
            cache_key = _make_key(args, kwargs)
            cached = storage.get(cache_key)
            if cached:
                stats['hits'] += 1
                return loads(cached, protocol='pickle')

            stats['misses'] += 1
            result = func(*args, **kwargs)
            storage.set(cache_key, dumps(result, protocol='pickle'), timeout)
            return result

        # Extra attributes: counters and a helper that clears every cached entry.
        wrapper.info = stats
        wrapper.clear = storage.delete_prefix

        return wrapper

    return decorator
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
###############################################################################
|
|
104
|
+
# RedisClient Class Implementation
|
|
105
|
+
###############################################################################
|
|
106
|
+
class RetryLog(Retry):
    """Retry policy identical to redis-py's Retry, plus an error log per failed attempt."""

    def call_with_retry(self, do: Callable, fail: Callable) -> Any:
        """
        Execute an operation that might fail and returns its result, or
        raise the exception that was thrown depending on the `Backoff` object.

        Args:
            do: the operation to call. Expects no argument.
            fail: the failure handler, expects the last error that was thrown.
        """
        # For redis-py 6.4.0 the fail function discards the error and only calls self.close_connection
        # So we copied the original function to insert the log.
        self._backoff.reset()
        failures = 0
        while True:
            try:
                return do()
            except self._supported_errors as error:
                failures += 1
                # Let the caller-provided handler run before deciding to give up.
                fail(error)
                if self._retries >= 0 and failures > self._retries:
                    # Retry budget exhausted: re-raise the last error.
                    raise

                # Only difference from the upstream implementation: log each retry.
                log.error('Redis connection error: %s - %s/%s', error, failures, self._retries)
                backoff = self._backoff.compute(failures)
                if backoff > 0:
                    sleep(backoff)
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
class RedisClient(BaseObject):
    """
    Base Redis client that keeps one shared (singleton) connection per
    host:port pair and centralizes key building for its subclasses.
    """

    ## Private attributes
    # Class-level on purpose: one shared Redis connection per 'host:port' key.
    _connection: dict = {}  # noqa: RUF012
    # Separator used when joining namespace/prefix/key parts.
    _separator = ':'

    ## Public attributes
    # Default expiration (seconds) applied when a method receives no timeout.
    timeout_default: int = None
    # Optional key prefix, appended after settings.REDIS_NAMESPACE.
    prefix: str = None
    host: str = None
    port: int = None

    ## Private methods
    def _connect(self) -> None:
        """Create a Redis connection and stores to later use."""
        # https://redis-py.readthedocs.io/en/stable/retry.html
        # RetryLog only adds an error log per retry attempt on top of Retry.
        if not settings.REDIS_SHOW_LOGS:
            retry_class = Retry
        else:
            retry_class = RetryLog
        backoff = ExponentialBackoff(base=settings.REDIS_RETRY_BACKOFF_MIN, cap=settings.REDIS_RETRY_BACKOFF_MAX)
        retry = retry_class(backoff=backoff, retries=settings.REDIS_RETRY_ATTEMPTS)

        # https://github.com/redis/redis-py/issues/722
        # We use RedisClient._connection to create a Singleton connection
        log.debug('Connecting on Redis(%s:%s).....', self.host, self.port)

        error_list = settings.REDIS_RETRY_EXTRA_ERROR_LIST or []
        RedisClient._connection[self._connection_key()] = Redis(
            host=self.host,
            port=self.port,
            health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,  # seconds
            socket_keepalive=settings.REDIS_SOCKET_KEEPALIVE,
            socket_timeout=settings.REDIS_SOCKET_TIMEOUT,  # seconds
            retry=retry,
            retry_on_error=error_list + DEFAULT_ERROR_LIST,
        )

    def __init__(self, **kwargs) -> None:
        # Fall back to the configured host/port when none were provided.
        super().__init__(**kwargs)
        if self.host is None:
            self.host = settings.REDIS_HOST

        if self.port is None:
            self.port = settings.REDIS_PORT

    @property
    def connection(self) -> Redis:
        """
        We use this property to check if Redis is online
        then returning the working connection.
        """
        try:
            RedisClient._connection[self._connection_key()].ping()
        except Exception:  # noqa: BLE001
            # Create a new connection. This also covers the very first
            # access, when the dict lookup raises KeyError.
            self._connect()

        return RedisClient._connection[self._connection_key()]

    def _connection_key(self) -> str:
        """Return the 'host:port' key used to index the shared connection dict."""
        return f'{self.host}:{self.port}'

    def _build_prefix(self, prefix: str | None = None) -> str:
        """
        Build the prefix for the Redis key using the namespace and prefix.

        Args:
            prefix (str | None): The prefix to use. If None, use the class prefix.

        Returns:
            str: The constructed prefix.
        """
        if prefix is None:
            prefix = self.prefix

        # Joined as '<namespace>:<prefix>'; empty parts are omitted.
        parts = []
        if settings.REDIS_NAMESPACE:
            parts.append(settings.REDIS_NAMESPACE)
        if prefix:
            parts.append(prefix)

        return self._separator.join(parts)

    def _build_key(self, key: str | None = None, parts: list | None = None) -> str:
        """
        Build the key for the Redis key using the prefix, namespace and key.

        Args:
            key (str | None): The key to use. If None, use the class key.
            parts (list | None): The list of parts to use.

        Returns:
            str: The constructed key.
        """
        # Note: caller-supplied parts come first, then '<namespace>:<prefix>', then the key.
        parts = parts or []
        prefix = self._build_prefix()
        if prefix:
            parts.append(prefix)
        if key:
            parts.append(key)

        return self._separator.join(parts)

    def _encode(self, value: Any) -> bytes:
        """
        Encodes a value before storing it in Redis.

        This method can be overridden by subclasses when specific serialization
        or transformation logic is needed before saving the value to Redis. By centralizing
        this behavior here, other methods that interact with Redis do not need to be modified.
        The base implementation is a no-op pass-through.

        Args:
            value (Any): The value to be stored.

        Returns:
            bytes: The serialized or transformed value ready for storage.
        """
        return value

    def _decode(self, value: bytes) -> Any:
        """
        Decodes a value retrieved from Redis.

        This method can be overridden by subclasses when specific deserialization
        or transformation is required after retrieving the value from Redis. This provides
        a single point of customization without needing to override higher-level logic.
        The base implementation is a no-op pass-through.

        Args:
            value (bytes): The raw value retrieved from Redis.

        Returns:
            Any: The original or transformed value.
        """
        return value

    def get_hash_key(self, key: bytes | str) -> str:
        """
        Convert the key to a SHA256 hash to avoid strange chars on name that can break Redis.
        This method adds the prefix and namespaces to the key.

        Args:
            key (bytes | str): The original key to hash.

        Raises:
            ValueError: If key is None.

        Returns:
            str: The fully constructed and hashed Redis key.
        """
        if key is None:
            raise ValueError('Key cannot be None.')

        if isinstance(key, str):
            key = key.encode('utf-8')

        hashed_key = sha256(key).hexdigest()

        return self._build_key(hashed_key)

    def flush_all(self) -> bool:
        """Clear all keys from Redis (FLUSHALL). Returns True on success."""
        if self.connection.flushall():
            if settings.REDIS_SHOW_LOGS:
                log.info('Redis flushed all keys.')
            return True
        log.error('Redis flush all keys failed.')
        return False
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
###############################################################################
|
|
302
|
+
# RedisCache Class Implementation
|
|
303
|
+
###############################################################################
|
|
304
|
+
class RedisCache(RedisClient):
|
|
305
|
+
"""Redis cache client"""
|
|
306
|
+
|
|
307
|
+
def get_set(self, key: str, func: Callable, timeout: int | None = None, **kwargs) -> Any:
    """
    Method execute a get on Redis server, if this returns None then we execute the set method.
    For the set method we use the func(**kwargs).
    To avoid race conditions we use a RedisLock to run only one set method.

    Args:
        key (str): The key that will be used to cache the result from func.
        func (Callable): The function that generates the desired cached result.
        timeout (int, optional): The timeout that this key will be keep on cache. Defaults to None.
        **kwargs (dict, optional): Extra params that will be send to the func.

    Returns:
        Any: The result from cache get or the result from func.
        NOTE(review): if func raises, the error is only logged and None is
        returned to the caller of the winning process.
    """
    result = self.get(key)
    if result is None:
        # We create a lock on Redis (RedisLock is defined elsewhere in this module)
        lock = RedisLock(name=f'redis-get-set-lock-{key}')
        if lock.acquire(blocking=False):
            # If we can acquire the lock then we don't have race conditions
            # and proceed normally
            try:
                result = func(**kwargs)
                self.set(key=key, value=result, timeout=timeout)
            except Exception:  # noqa: BLE001
                # We generate a log for this exception; result stays None
                log.error('Redis get_set method: %s', traceback.format_exc())

            # Then we release the lock
            lock.release()

        else:
            # If we can't acquire the lock that means we have race condition
            # in this case we need to wait for the key be set or if some error
            # occur the lock will be released.
            result = self.get(key)
            while result is None:
                result = self.get(key)
                if result is None and lock.acquire(blocking=False):
                    # Lock became free but the key was never set (the winner
                    # failed) — release the lock and stop waiting.
                    lock.release()
                    break

                # We wait 0.5 second until next try
                sleep(0.5)

    # Redis returns raw bytes; normalize to str for the caller.
    if isinstance(result, bytes):
        result = result.decode('utf-8')

    return result
|
|
358
|
+
|
|
359
|
+
def get(self, key: bytes) -> Any:
    """
    Retrieve a value from Redis.

    The key is hashed (with prefix/namespace applied) before the lookup.

    Args:
        key (bytes): The key to retrieve the value.

    Returns:
        Any: The decoded value, or None when the key does not exist.
    """
    hashed = self.get_hash_key(key)
    return self._decode(self.connection.get(hashed))
|
|
372
|
+
|
|
373
|
+
def get_multi(self, keys: list[str]) -> dict:
    """
    Fetch several values from Redis in a single MGET round trip.

    Args:
        keys (list[str]): The list of keys to retrieve values for.

    Returns:
        dict: Maps each original key to its decoded value (None when absent).
    """
    hashed_keys = [self.get_hash_key(key) for key in keys]
    raw_values = self.connection.mget(hashed_keys)

    result = {}
    for key, raw in zip(keys, raw_values, strict=False):
        result[key] = self._decode(raw)
    return result
|
|
386
|
+
|
|
387
|
+
def set(self, key: bytes, value: Any, timeout: int | None = None) -> bool:
    """
    Store a key/value pair in Redis.

    When both timeout and timeout_default are None the entry never
    expires; otherwise it expires after that many seconds.

    Args:
        key (bytes): The key to set.
        value (Any): The value to set.
        timeout (int, optional): The timeout in seconds. Defaults to None.

    Returns:
        bool: True if the operation was successful, False otherwise.
    """
    hashed = self.get_hash_key(key)
    payload = self._encode(value)
    expire = timeout or self.timeout_default
    return bool(self.connection.set(name=hashed, value=payload, ex=expire))
|
|
408
|
+
|
|
409
|
+
def set_multi(self, data_dict: dict, timeout: int | None = None) -> None:
    """
    Store several key/value pairs in Redis using one pipeline.

    Args:
        data_dict (dict): A dictionary containing the key/value pairs to set.
        timeout (int | None, optional): The timeout in seconds. Defaults to None.
    """
    expire = timeout or self.timeout_default

    pipe = self.connection.pipeline()
    for key, value in data_dict.items():
        pipe.set(self.get_hash_key(key), self._encode(value), ex=expire)
    results = pipe.execute()

    # Any False in the pipeline results means at least one SET failed.
    if False in results:
        log.error(
            'Error RedisCache set_multi',
            extra={'labels': {'REDIS_NAMESPACE': settings.REDIS_NAMESPACE, 'data': data_dict, 'time': expire}},
        )
|
|
429
|
+
|
|
430
|
+
def delete(self, key: bytes | str | Iterable) -> bool:
|
|
431
|
+
"""
|
|
432
|
+
Delete one or more keys from Redis.
|
|
433
|
+
|
|
434
|
+
Args:
|
|
435
|
+
key (bytes | str | Iterable): The key or keys to be deleted.
|
|
436
|
+
|
|
437
|
+
Returns:
|
|
438
|
+
bool: True if the operation was successful, False otherwise.
|
|
439
|
+
"""
|
|
440
|
+
if not isinstance(key, (list, set, tuple)):
|
|
441
|
+
keys = [self.get_hash_key(key)]
|
|
442
|
+
else:
|
|
443
|
+
keys = [self.get_hash_key(k) for k in key]
|
|
444
|
+
|
|
445
|
+
if not keys:
|
|
446
|
+
return False
|
|
447
|
+
|
|
448
|
+
ret = self.connection.delete(*keys) == len(keys)
|
|
449
|
+
return bool(ret)
|
|
450
|
+
|
|
451
|
+
def delete_multi(self, keys: Iterable) -> bool:
    """
    Remove several keys from Redis at once.

    Thin convenience wrapper around delete() for an iterable of keys.

    Args:
        keys (Iterable): The keys to be deleted.

    Returns:
        bool: True if the operation was successful, False otherwise.
    """
    return self.delete(keys)
|
|
462
|
+
|
|
463
|
+
def delete_prefix(self, prefix: str | None = None) -> None:
    """
    Remove every key that starts with the given prefix.

    When prefix is None the class prefix is used. A ':*' suffix is
    appended when the pattern contains no wildcard; SCAN then iterates
    all matching keys and a pipeline deletes them in one shot.

    Args:
        prefix (str | None): The prefix to use. If None, use the class prefix.
    """
    pattern = self._build_prefix(prefix)
    if '*' not in pattern:
        pattern = f'{pattern}{self._separator}*'

    # SCAN until the server returns cursor 0, batching deletes in a pipeline.
    pipe = self.connection.pipeline()
    cursor = 0
    while True:
        cursor, keys = self.connection.scan(cursor=cursor, match=pattern, count=1000)
        if keys:
            pipe.delete(*keys)
        if cursor == 0:
            break

    pipe.execute()
|
|
488
|
+
|
|
489
|
+
def incr(self, key: str, delta: int = 1, initial_value: Any = None, timeout: int | None = None) -> int:
|
|
490
|
+
"""
|
|
491
|
+
Increment the value of a key in Redis. If the key does not exist, it will be created with the initial value.
|
|
492
|
+
|
|
493
|
+
Args:
|
|
494
|
+
key (str): The key to increment.
|
|
495
|
+
delta (int, optional): The amount to increment. Defaults to 1.
|
|
496
|
+
initial_value (Any, optional): The initial value to set if the key does not exist. Defaults to None.
|
|
497
|
+
timeout (int | None, optional): The expiration time for the key in seconds. Defaults to None.
|
|
498
|
+
|
|
499
|
+
Returns:
|
|
500
|
+
int: The new value of the key after incrementing.
|
|
501
|
+
"""
|
|
502
|
+
if initial_value is None:
|
|
503
|
+
raise ValueError('Initial value must be set.')
|
|
504
|
+
|
|
505
|
+
key_ = self.get_hash_key(key)
|
|
506
|
+
|
|
507
|
+
pipe = self.connection.pipeline()
|
|
508
|
+
pipe.multi()
|
|
509
|
+
pipe.set(key_, initial_value, nx=True, ex=timeout or self.timeout_default)
|
|
510
|
+
pipe.incr(key_, delta)
|
|
511
|
+
ret = pipe.execute()
|
|
512
|
+
|
|
513
|
+
if ret[1] is None:
|
|
514
|
+
log.error(
|
|
515
|
+
'Error RedisCache incr',
|
|
516
|
+
extra={
|
|
517
|
+
'labels': {
|
|
518
|
+
'REDIS_NAMESPACE': settings.REDIS_NAMESPACE,
|
|
519
|
+
'key': key,
|
|
520
|
+
'delta': delta,
|
|
521
|
+
'initial_value': initial_value,
|
|
522
|
+
}
|
|
523
|
+
},
|
|
524
|
+
)
|
|
525
|
+
|
|
526
|
+
return ret[1]
|
|
527
|
+
|
|
528
|
+
def decr(self, key: str, delta: int = 1, initial_value: Any = None, timeout: int | None = None) -> int | None:
    """
    Decrement the value of a key in Redis. If the key does not exist, it will be created with the initial value.
    The result is floored at zero when the stored value is smaller than delta.

    Args:
        key (str): The key to decrement.
        delta (int, optional): The amount to decrement. Defaults to 1.
        initial_value (Any, optional): The initial value to set if the key does not exist. Defaults to None.
        timeout (int | None, optional): The expiration time for the key in seconds. Defaults to None.

    Raises:
        ValueError: If initial_value is None.

    Returns:
        int | None: The new value of the key after decrementing, or None if the operation failed.
    """
    if initial_value is None:
        raise ValueError('Initial value must be set.')

    key_ = self.get_hash_key(key)
    # Optimistic locking: WATCH the key, compute the new value, then run
    # the write inside MULTI; a concurrent write raises WatchError and we retry.
    with self.connection.pipeline(transaction=True) as pipe:
        while True:
            try:
                pipe.watch(key_)
                current = self._decode(pipe.get(key_))

                if isinstance(current, bytes):
                    current = int(current.decode())

                if current is None:
                    # Missing key is treated as zero for the comparison below.
                    current = 0

                pipe.multi()
                if current >= delta:
                    # Plain DECR is safe: the result cannot go below zero.
                    pipe.decr(key_, delta)
                else:
                    # Key missing (current == 0): seed from initial_value;
                    # otherwise decrement what is there — never below zero.
                    value = initial_value - delta if current == 0 else current - delta
                    value = max(0, value)
                    pipe.set(key_, value, ex=timeout or self.timeout_default)
                exec_value = pipe.execute()
                break
            except exceptions.WatchError:
                # Another client touched the key between WATCH and EXEC; retry.
                continue

    # SET returns True (so we report the computed value); DECR returns the new int.
    return value if exec_value[0] is True else exec_value[0]
|
|
570
|
+
|
|
571
|
+
def blpop(self, keys: list, timeout: int = 0) -> tuple:
    """
    Pop the first item from one of the lists, blocking until an item
    exists or the timeout is reached. A timeout of 0 blocks indefinitely.

    Args:
        keys (list): The list of keys to pop from.
        timeout (int, optional): The timeout in seconds. Defaults to 0.

    Returns:
        tuple: (name, value) pair, or None when the timeout expired.
    """
    if not isinstance(keys, list):
        keys = [keys]

    # Map hashed key -> original key so the caller gets the original name back.
    hashed_to_original = {self.get_hash_key(key): key for key in keys}
    popped = self.connection.blpop(list(hashed_to_original.keys()), timeout=timeout)
    if popped is None:
        return None

    name, raw_value = popped
    return (hashed_to_original[name.decode()], self._decode(raw_value))
|
|
592
|
+
|
|
593
|
+
def lpop(self, name: str, count: int | None = None) -> Any:
    """
    Pop the first item (or the first `count` items) from the list.

    Args:
        name (str): The name of the list.
        count (int | None, optional): The number of items to pop. Defaults to None.

    Returns:
        Any: The decoded popped value(s), or None when the list is empty.
    """
    raw = self.connection.lpop(self.get_hash_key(name), count=count)
    return self._decode(raw)
|
|
606
|
+
|
|
607
|
+
def rpush(self, name: str, *values, timeout: int | None = None) -> bool:
    """
    Append one or more values to the end of the list, creating it when
    it does not exist. When a timeout is given (or timeout_default is
    set) the whole list expires after that many seconds; otherwise it
    is kept forever.

    Args:
        name (str): The name of the list.
        *values: The values to push to the list.
        timeout (int | None, optional): The expiration time for the list in seconds. Defaults to None.

    Returns:
        bool: True if the operation was successful, False otherwise.
    """
    hashed = self.get_hash_key(name)
    expire = timeout or self.timeout_default

    encoded = [self._encode(item) for item in values]
    with self.connection.pipeline() as pipe:
        pipe.rpush(hashed, *encoded)
        if expire:
            pipe.expire(hashed, expire)
        results = pipe.execute()

    return bool(results)
|
|
633
|
+
|
|
634
|
+
def lrange(self, name: str, start: int, end: int) -> list:
    """
    Get a range of values from the list.

    Args:
        name (str): The name of the list.
        start (int): The starting index.
        end (int): The ending index (inclusive, per Redis LRANGE semantics).

    Returns:
        list: The decoded values in the specified range.
    """
    raw_items = self.connection.lrange(self.get_hash_key(name), start, end)
    return [self._decode(item) for item in raw_items]
|
|
649
|
+
|
|
650
|
+
def hset(self, name: str, field: str, value: Any, timeout: int | None = None) -> bool:
    """
    Set a field in a hash stored at key.

    If the key does not exist, a new key will be created.
    If the field already exists, it will be overwritten.
    If the timeout is None, the hash will be kept forever.
    If the timeout is > 0, the hash will be kept for the specified time.

    Args:
        name (str): The name of the hash.
        field (str): The field to set.
        value (Any): The value to set.
        timeout (int | None, optional): The expiration time for the hash in seconds. Defaults to None.

    Returns:
        bool: True if the operation was successful, False otherwise.
    """
    key = self.get_hash_key(name)
    timeout = timeout or self.timeout_default

    value = self._encode(value)
    with self.connection.pipeline() as pipe:
        pipe.hset(key, field, value)
        if timeout:
            # timeout has already been resolved against self.timeout_default above,
            # so pass it directly (same pattern as rpush).
            pipe.expire(key, timeout)
        ret = pipe.execute()

    return bool(ret)
|
|
678
|
+
|
|
679
|
+
def hgetall(self, name: str) -> dict:
    """
    Get all fields and values in a hash stored at key.

    If the key does not exist, an empty dictionary will be returned.
    If the key is not a hash, an error will be raised.

    Args:
        name (str): The name of the hash.

    Returns:
        dict: A dictionary containing all fields and values in the hash.
    """
    raw = self.connection.hgetall(self.get_hash_key(name))
    # NOTE(review): only values go through _decode here; field names are returned
    # exactly as the connection yields them (presumably bytes unless the client
    # decodes responses) — confirm against callers before changing.
    return {field: self._decode(data) for field, data in raw.items()}
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+
###############################################################################
|
|
702
|
+
# RedisCacheCompressed Class Implementation
|
|
703
|
+
###############################################################################
|
|
704
|
+
class RedisCacheCompressed(RedisCache):
    """
    Store data on the Redis server using pickle and zlib.

    Use this class if you need to store Python objects on Redis.
    """

    def _encode(self, value: Any) -> bytes:
        """
        Encode the value to bytes using pickle and zlib.

        Args:
            value (Any): The value to encode.

        Returns:
            bytes: The encoded value (None is passed through untouched).
        """
        # https://everysk.atlassian.net/browse/COD-11777
        if value is None:
            return None
        return compress(value, serialize='pickle')

    def _decode(self, value: bytes) -> Any:
        """
        Decode the value from bytes using pickle and zlib.

        Args:
            value (bytes): The value to decode.

        Returns:
            Any: The decoded value (None is passed through untouched).
        """
        # https://everysk.atlassian.net/browse/COD-11777
        if value is None:
            return None
        return decompress(value, serialize='pickle')
|
|
735
|
+
|
|
736
|
+
|
|
737
|
+
###############################################################################
|
|
738
|
+
# RedisList Class Implementation
|
|
739
|
+
###############################################################################
|
|
740
|
+
class RedisList(RedisCacheCompressed):
    """
    First in, first out Redis list implementation.
    -> https://redis.io/docs/data-types/lists/
    -> https://koalatea.io/python-redis-lists/

    """

    name = StrField(required=True)

    def bpop(self, timeout: int = 0) -> tuple:
        """
        Pop the first item from the list, blocking until a item exists
        or timeout was reached.
        If timeout is 0, then block indefinitely.

        Returns:
            tuple: (list name, value)
        """
        result = super().blpop(self.name, timeout=timeout)
        if result is None:
            raise RedisEmptyListError(f"The RedisList(name='{self.name}') is empty.")
        return result

    def pop(self) -> Any:
        """
        Pop the first item from the list.

        Raises:
            RedisEmptyListError: If the return is None/empty.
        """
        result = super().lpop(self.name)
        if result is None:
            raise RedisEmptyListError(f"The RedisList(name='{self.name}') is empty.")

        return result

    def push(self, value: Any) -> None:
        """
        Puts value on the last position of the list.

        Args:
            value (Any): the value to be inserted into the last position
        """
        super().rpush(self.name, value)

    def clear(self) -> None:
        """Clear all keys."""
        super().delete(self.name)
|
|
791
|
+
|
|
792
|
+
|
|
793
|
+
###############################################################################
|
|
794
|
+
# RedisChannel Class Implementation
|
|
795
|
+
###############################################################################
|
|
796
|
+
class RedisChannel(RedisClient):
    """
    Base class to work with channels on Redis.
    https://blog.devgenius.io/how-to-use-redis-pub-sub-in-your-python-application-b6d5e11fc8de
    """

    _channel: client.PubSub = None
    exit_message = StrField(default='exit', readonly=True)
    name = StrField(required=True)

    def send(self, message: dict) -> None:
        """Publish *message* on this channel."""
        self.connection.publish(self.name, message)

    @property
    def channel(self) -> client.PubSub:
        """Create a connection with name"""
        if self._channel is None:
            pubsub = self.connection.pubsub()
            pubsub.subscribe(self.name)
            self._channel = pubsub

        return self._channel

    def parse_message(self, message: dict) -> tuple:
        """
        Convert message data from bytes to str

        Args:
            message (dict): The message dictionary to be parsed.

        Returns:
            tuple: Containing the channel name and its corresponding data.
        """
        # message format
        # {'type': None, 'pattern': None, 'channel': None, 'data': None} # noqa: ERA001
        if not message:
            return (None, None)

        channel_name = message.get('channel') or None
        data = message.get('data', '') or ''
        if isinstance(channel_name, bytes):
            channel_name = channel_name.decode()
        if isinstance(data, bytes):
            data = data.decode()

        return (channel_name, data)

    def consume(self, callback: Callable | None = None) -> None:
        """Loop for consume message from channel when they arrive."""
        # We can use a function for callback or self.process_message
        handler = callback if callback else self.process_message
        for raw_message in self.channel.listen():
            channel_name, data = self.parse_message(raw_message)
            # Only care if the message is sent to this channel
            if channel_name != self.name:
                continue
            # Stop iteration on exit_message
            if data == self.exit_message:
                break
            handler(data)

    def process_message(self, message: str) -> None:
        """Use it on child classes to manipulate the received message."""
|
|
860
|
+
|
|
861
|
+
|
|
862
|
+
###############################################################################
|
|
863
|
+
# RedisLock Class Implementation
|
|
864
|
+
###############################################################################
|
|
865
|
+
class RedisLock(RedisClient):
    """
    Class used to create a lock on Redis
    https://rohansaraf.medium.com/distributed-locking-with-redis-ecb0773e7695
    https://redis-py.readthedocs.io/en/latest/lock.html
    """

    ## Public attributes
    prefix = StrField(default='redis-lock', readonly=True)
    # Token identifying the current lock owner; set on a successful acquire().
    token = StrField()
    name = StrField(required=True)
    timeout = FloatField(default=None) # timeout indicates a maximum life for the lock in seconds.
    blocking = BoolField(default=True) # If True, the lock will block until it can be acquired.

    def _get_lock(self) -> Lock:
        """
        Create a lock object with the name and timeout.
        If the token is set, it will be encoded.
        The token is used to identify the lock owner.
        The lock is created with the name and timeout.
        The timeout is the maximum life for the lock in seconds.
        If the timeout is None, the lock will be kept forever.

        Returns:
            Lock: The lock object.
        """
        lock = self.connection.lock(name=self._get_name(), timeout=self.timeout, blocking=self.blocking)

        # Inject our token into the Lock's thread-local storage so the redis-py
        # Lock treats this instance as the owner of an already-acquired lock.
        if self.token:
            lock.local.token = self._encode_token()

        return lock

    def _encode_token(self, token: str | None = None) -> bytes:
        """
        Encode the token to bytes using the Redis encoder.
        This is used to identify the lock owner.

        Args:
            token (str | None): The token to encode. If None, use the class token.

        Raises:
            LockError: If the token is None or empty.

        Returns:
            bytes: The encoded token.
        """
        token = token or self.token
        if not token:
            raise exceptions.LockError('Cannot encode an empty token')

        # Use the connection's own encoder so the stored bytes match what
        # redis-py would write for the same string.
        redis_encoder = self.connection.get_encoder()
        return redis_encoder.encode(self._get_token(token))

    def _get_name(self) -> str:
        """Convert self.name to a SHA256 hash, this avoid strange chars on name that can broke Redis."""
        return self.get_hash_key(self.name)

    def _get_token(self, token: str | bytes | None = None) -> str:
        """Convert self.token to a SHA256 hash, this avoid strange chars on token that can broke Redis."""
        return self.get_hash_key(token or self.token)

    def acquire(
        self, token: str | None = None, *, blocking: bool | None = None, blocking_timeout: float | None = None
    ) -> bool:
        """
        Try to acquire a lock with self.name, if lock is already acquired returns False.
        If blocking is False, always return immediately,
        if blocking is True it will waiting until block can be acquired.
        blocking_timeout specifies the maximum number of seconds to wait trying to acquire the lock.
        If token is None, a new token will be generated.
        If token is not None, the lock will be acquired with this token.
        This token is used to identify the lock owner.

        Args:
            token (str | None): The token to use for the lock. If None, a new token will be generated.
            blocking (bool | None): If True, wait until the lock is acquired. Defaults to None.
            blocking_timeout (float | None): The maximum number of seconds to wait for the lock. Defaults to None.

        Returns:
            bool: True if the lock was acquired, False otherwise.
        """
        # Resolve the token: keep an existing self.token, generate a fresh one,
        # or adopt the caller-supplied token.
        if not token and not self.token:
            self.token = uuid1().hex
        elif token:
            self.token = token

        redis_lock: Lock = self._get_lock()

        if redis_lock.acquire(token=self._encode_token(), blocking=blocking, blocking_timeout=blocking_timeout):
            return True

        # If we can't acquire the lock, we need to check if the lock is owned by this instance
        # Acquisition failed: clear the token so owned()/release() on this
        # instance don't claim a lock it never obtained.
        self.token = None
        return False

    def owned(self) -> bool:
        """
        Returns True if this key is locked by this lock, otherwise False.
        This method is used to check if the lock is owned by this instance.

        Returns:
            bool: True if the lock is owned by this instance, False otherwise.
        """
        # Without a token this instance cannot be the owner.
        if not self.token:
            return False
        redis_lock: Lock = self._get_lock()
        redis_lock.local.token = self._encode_token()
        return redis_lock.owned()

    def release(self, *, force: bool = False) -> None:
        """
        Release the lock if it is owned by this instance.
        If the lock is not owned, it will raise a LockError.
        This method is used to release the lock and set the token to None.

        Args:
            force (bool): If True, force the release of the lock even if it is not owned.
                This will delete the key from Redis. Defaults to False.
        """
        # Forced release bypasses ownership checks by deleting the key outright.
        if force:
            self.connection.delete(self._get_name())
            return

        if not self.token:
            raise exceptions.LockError('Cannot release an unlocked lock')

        redis_lock: Lock = self._get_lock()
        redis_lock.release()
        # Drop the token so this instance no longer claims ownership.
        self.token = None

        return

    def do_release(self, expected_token: str) -> None:
        """
        Force release lock by an token.

        Args:
            expected_token (str): lock key token.
        """
        if not expected_token:
            raise exceptions.LockError('Cannot release an unlocked lock')

        redis_lock = self._get_lock()
        # Only attempt the release when the lock is currently held; do_release
        # verifies the stored token matches expected_token.
        if redis_lock.locked():
            redis_lock.do_release(expected_token=self._encode_token(expected_token))

    def get_lock_info(self) -> dict:
        """
        Get information about the lock status.
        This includes whether the lock is currently held and the name of the lock.

        Returns:
            dict: A dictionary containing the lock status and name.
        """
        redis_lock = self._get_lock()
        return {'locked': redis_lock.locked(), 'name': self.name}
|