cledar-sdk 2.0.2__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cledar/__init__.py +1 -0
- cledar/kafka/README.md +239 -0
- cledar/kafka/__init__.py +42 -0
- cledar/kafka/clients/base.py +117 -0
- cledar/kafka/clients/consumer.py +138 -0
- cledar/kafka/clients/producer.py +97 -0
- cledar/kafka/config/schemas.py +262 -0
- cledar/kafka/exceptions.py +17 -0
- cledar/kafka/handlers/dead_letter.py +88 -0
- cledar/kafka/handlers/parser.py +83 -0
- cledar/kafka/logger.py +5 -0
- cledar/kafka/models/input.py +17 -0
- cledar/kafka/models/message.py +14 -0
- cledar/kafka/models/output.py +12 -0
- cledar/kafka/tests/.env.test.kafka +3 -0
- cledar/kafka/tests/README.md +216 -0
- cledar/kafka/tests/conftest.py +104 -0
- cledar/kafka/tests/integration/__init__.py +1 -0
- cledar/kafka/tests/integration/conftest.py +78 -0
- cledar/kafka/tests/integration/helpers.py +47 -0
- cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
- cledar/kafka/tests/integration/test_integration.py +394 -0
- cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
- cledar/kafka/tests/integration/test_producer_integration.py +217 -0
- cledar/kafka/tests/unit/__init__.py +1 -0
- cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
- cledar/kafka/tests/unit/test_config_validation.py +609 -0
- cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
- cledar/kafka/tests/unit/test_error_handling.py +674 -0
- cledar/kafka/tests/unit/test_input_parser.py +310 -0
- cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
- cledar/kafka/tests/unit/test_utils.py +25 -0
- cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
- cledar/kafka/utils/callbacks.py +28 -0
- cledar/kafka/utils/messages.py +39 -0
- cledar/kafka/utils/topics.py +15 -0
- cledar/kserve/README.md +352 -0
- cledar/kserve/__init__.py +5 -0
- cledar/kserve/tests/__init__.py +0 -0
- cledar/kserve/tests/test_utils.py +64 -0
- cledar/kserve/utils.py +30 -0
- cledar/logging/README.md +53 -0
- cledar/logging/__init__.py +5 -0
- cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
- cledar/logging/universal_plaintext_formatter.py +99 -0
- cledar/monitoring/README.md +71 -0
- cledar/monitoring/__init__.py +5 -0
- cledar/monitoring/monitoring_server.py +156 -0
- cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
- cledar/monitoring/tests/test_monitoring_server.py +59 -0
- cledar/nonce/README.md +99 -0
- cledar/nonce/__init__.py +5 -0
- cledar/nonce/nonce_service.py +62 -0
- cledar/nonce/tests/__init__.py +0 -0
- cledar/nonce/tests/test_nonce_service.py +136 -0
- cledar/redis/README.md +536 -0
- cledar/redis/__init__.py +17 -0
- cledar/redis/async_example.py +112 -0
- cledar/redis/example.py +67 -0
- cledar/redis/exceptions.py +25 -0
- cledar/redis/logger.py +5 -0
- cledar/redis/model.py +14 -0
- cledar/redis/redis.py +764 -0
- cledar/redis/redis_config_store.py +333 -0
- cledar/redis/tests/test_async_integration_redis.py +158 -0
- cledar/redis/tests/test_async_redis_service.py +380 -0
- cledar/redis/tests/test_integration_redis.py +119 -0
- cledar/redis/tests/test_redis_service.py +319 -0
- cledar/storage/README.md +529 -0
- cledar/storage/__init__.py +6 -0
- cledar/storage/constants.py +5 -0
- cledar/storage/exceptions.py +79 -0
- cledar/storage/models.py +41 -0
- cledar/storage/object_storage.py +1274 -0
- cledar/storage/tests/conftest.py +18 -0
- cledar/storage/tests/test_abfs.py +164 -0
- cledar/storage/tests/test_integration_filesystem.py +359 -0
- cledar/storage/tests/test_integration_s3.py +453 -0
- cledar/storage/tests/test_local.py +384 -0
- cledar/storage/tests/test_s3.py +521 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/METADATA +1 -1
- cledar_sdk-2.1.0.dist-info/RECORD +84 -0
- cledar_sdk-2.0.2.dist-info/RECORD +0 -4
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/WHEEL +0 -0
- {cledar_sdk-2.0.2.dist-info → cledar_sdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
"""Redis-based configuration store with caching and watching capabilities."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
import time
|
|
6
|
+
from collections.abc import Callable
|
|
7
|
+
from dataclasses import asdict
|
|
8
|
+
from threading import Thread
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
from redis import ConnectionError as RedisConnectionError
|
|
12
|
+
from redis import Redis
|
|
13
|
+
|
|
14
|
+
from .logger import logger
|
|
15
|
+
from .model import ConfigAbstract as T
|
|
16
|
+
|
|
17
|
+
KEY_EVENT_FORMAT = "__keyspace@{DB}__:{KEY}"
|
|
18
|
+
KEY_EVENT_REGEX = r"__keyspace@(?P<db>\d+)__:(?P<key>.+)"
|
|
19
|
+
|
|
20
|
+
OP_EVENT_FORMAT = "__keyevent@{DB}__:{OPERATION}"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class RedisConfigStore:
|
|
24
|
+
"""Store for configuration objects in Redis with local caching and pub/sub.
|
|
25
|
+
|
|
26
|
+
Provides updates on configuration changes.
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
TYPE_NONE = "none"
|
|
30
|
+
TYPE_LIST = "list"
|
|
31
|
+
TYPE_STRING = "string"
|
|
32
|
+
|
|
33
|
+
EVENT_DELETE = "del"
|
|
34
|
+
EVENT_SET = "set"
|
|
35
|
+
EVENT_RPUSH = "rpush"
|
|
36
|
+
EVENT_LSET = "lset"
|
|
37
|
+
|
|
38
|
+
def __init__(self, redis: Redis, prefix: str | None = None) -> None:
|
|
39
|
+
"""Initialize the RedisConfigStore.
|
|
40
|
+
|
|
41
|
+
Args:
|
|
42
|
+
redis: An initialized Redis client.
|
|
43
|
+
prefix: Optional prefix for keys stored in Redis.
|
|
44
|
+
|
|
45
|
+
"""
|
|
46
|
+
self._redis: Redis = redis
|
|
47
|
+
self._pubsub = redis.pubsub() # type: ignore
|
|
48
|
+
self._db: int = redis.connection_pool.connection_kwargs.get("db")
|
|
49
|
+
self._prefix: str = prefix or ""
|
|
50
|
+
self._cache: dict[str, str] = {}
|
|
51
|
+
self._cache_verisons: dict[str, int] = {}
|
|
52
|
+
self._monitoring: dict[
|
|
53
|
+
str, list[Callable[[int, str, str, str], None] | None]
|
|
54
|
+
] = {}
|
|
55
|
+
self._watcher_thread: Thread = Thread(target=self._watcher)
|
|
56
|
+
self._watcher_thread.start()
|
|
57
|
+
|
|
58
|
+
def is_ready(self) -> bool:
|
|
59
|
+
"""Check if the Redis connection is ready.
|
|
60
|
+
|
|
61
|
+
Returns:
|
|
62
|
+
bool: True if Redis responds to ping, False otherwise.
|
|
63
|
+
|
|
64
|
+
"""
|
|
65
|
+
try:
|
|
66
|
+
return self._redis.ping() # type: ignore
|
|
67
|
+
except RedisConnectionError:
|
|
68
|
+
return False
|
|
69
|
+
|
|
70
|
+
def versions(self, key: str) -> int | None:
|
|
71
|
+
"""Get the version (e.g., list length or existence) of a key.
|
|
72
|
+
|
|
73
|
+
Args:
|
|
74
|
+
key: The key to check.
|
|
75
|
+
|
|
76
|
+
Returns:
|
|
77
|
+
int | None: The version of the key.
|
|
78
|
+
|
|
79
|
+
"""
|
|
80
|
+
return self._key_versions(key)
|
|
81
|
+
|
|
82
|
+
def cached_version(self, key: str) -> int | None:
|
|
83
|
+
"""Get the version of a key stored in the local cache.
|
|
84
|
+
|
|
85
|
+
Args:
|
|
86
|
+
key: The key to check.
|
|
87
|
+
|
|
88
|
+
Returns:
|
|
89
|
+
int | None: The cached version of the key.
|
|
90
|
+
|
|
91
|
+
"""
|
|
92
|
+
return self._cache_verisons.get(key)
|
|
93
|
+
|
|
94
|
+
def fetch(self, cls: type[T], key: str) -> T | None:
|
|
95
|
+
"""Fetch a configuration object from Redis or local cache.
|
|
96
|
+
|
|
97
|
+
Args:
|
|
98
|
+
cls: The class to instantiate with the fetched data.
|
|
99
|
+
key: The key associated with the configuration.
|
|
100
|
+
|
|
101
|
+
Returns:
|
|
102
|
+
T | None: The configuration object, or None if not found.
|
|
103
|
+
|
|
104
|
+
"""
|
|
105
|
+
if key not in self._cache:
|
|
106
|
+
new_value = self._key_fetch(key)
|
|
107
|
+
if new_value is None:
|
|
108
|
+
return None
|
|
109
|
+
self._cache[key] = new_value
|
|
110
|
+
self._cache_verisons[key] = self._key_versions(key) or -1
|
|
111
|
+
self._key_watch(key)
|
|
112
|
+
return cls(**json.loads(self._cache[key]))
|
|
113
|
+
|
|
114
|
+
def update(self, key: str, value: T) -> None:
|
|
115
|
+
"""Update a configuration object in Redis and local cache.
|
|
116
|
+
|
|
117
|
+
Args:
|
|
118
|
+
key: The key to update.
|
|
119
|
+
value: The configuration object to store.
|
|
120
|
+
|
|
121
|
+
"""
|
|
122
|
+
self._cache[key] = self._key_update(key, value)
|
|
123
|
+
self._cache_verisons[key] = self._key_versions(key) or -1
|
|
124
|
+
self._key_watch(key)
|
|
125
|
+
|
|
126
|
+
def delete(self, key: str) -> None:
|
|
127
|
+
"""Delete a configuration object from Redis and local cache.
|
|
128
|
+
|
|
129
|
+
Args:
|
|
130
|
+
key: The key to delete.
|
|
131
|
+
|
|
132
|
+
"""
|
|
133
|
+
if key in self._cache:
|
|
134
|
+
del self._cache[key]
|
|
135
|
+
del self._cache_verisons[key]
|
|
136
|
+
self._key_delete(key)
|
|
137
|
+
self._key_watch(key)
|
|
138
|
+
|
|
139
|
+
def watch(
|
|
140
|
+
self, key: str, callback: Callable[[int, str, str, str], None] | None = None
|
|
141
|
+
) -> None:
|
|
142
|
+
"""Watch a key for changes and execute a callback.
|
|
143
|
+
|
|
144
|
+
Args:
|
|
145
|
+
key: The key to watch.
|
|
146
|
+
callback: The callback function to execute on change.
|
|
147
|
+
|
|
148
|
+
"""
|
|
149
|
+
self._key_watch(key, callback)
|
|
150
|
+
|
|
151
|
+
def __setitem__(self, key: str, value: T) -> None:
|
|
152
|
+
"""Alias for update method.
|
|
153
|
+
|
|
154
|
+
Args:
|
|
155
|
+
key: The key to update.
|
|
156
|
+
value: The value to set.
|
|
157
|
+
|
|
158
|
+
"""
|
|
159
|
+
self.update(key, value)
|
|
160
|
+
|
|
161
|
+
def __delitem__(self, key: str) -> None:
|
|
162
|
+
"""Alias for delete method.
|
|
163
|
+
|
|
164
|
+
Args:
|
|
165
|
+
key: The key to delete.
|
|
166
|
+
|
|
167
|
+
"""
|
|
168
|
+
self.delete(key)
|
|
169
|
+
|
|
170
|
+
def _key_watch(
|
|
171
|
+
self, key: str, callback: Callable[[int, str, str, str], None] | None = None
|
|
172
|
+
) -> None:
|
|
173
|
+
if key not in self._monitoring:
|
|
174
|
+
self._monitoring[key] = []
|
|
175
|
+
|
|
176
|
+
if callback in self._monitoring[key]:
|
|
177
|
+
return
|
|
178
|
+
|
|
179
|
+
self._monitoring[key].append(callback)
|
|
180
|
+
callbacks = list(self._monitoring[key])
|
|
181
|
+
event_key = KEY_EVENT_FORMAT.format(DB=self._db, KEY=self._build_key(key))
|
|
182
|
+
|
|
183
|
+
def callback_wrapper(message: dict[str, bytes]) -> None:
|
|
184
|
+
event_db, event_key, event_type = self._decode_event(message)
|
|
185
|
+
if event_db is None or event_key is None:
|
|
186
|
+
return
|
|
187
|
+
int_event_db = int(event_db)
|
|
188
|
+
str_event_key = str(event_key)
|
|
189
|
+
logger.info(
|
|
190
|
+
"Redis: Handling `%s` for key `%s` (in db %d)...",
|
|
191
|
+
event_type,
|
|
192
|
+
event_key,
|
|
193
|
+
event_db,
|
|
194
|
+
)
|
|
195
|
+
key_value: str = self._key_fetch(str_event_key) or ""
|
|
196
|
+
# NOTE: Reimplement to non-blocking and parallel,
|
|
197
|
+
# instead of sequencial and blocking
|
|
198
|
+
# NOTE: pubsub requires synchronous callbacks
|
|
199
|
+
for callback in callbacks:
|
|
200
|
+
if callback is None:
|
|
201
|
+
self._on_key_event(
|
|
202
|
+
int_event_db, str_event_key, event_type, key_value
|
|
203
|
+
)
|
|
204
|
+
else:
|
|
205
|
+
callback(int_event_db, str_event_key, event_type, key_value)
|
|
206
|
+
|
|
207
|
+
self._pubsub.psubscribe(**{event_key: callback_wrapper})
|
|
208
|
+
|
|
209
|
+
def _key_versions(self, key: str) -> int | None:
|
|
210
|
+
try:
|
|
211
|
+
key = self._build_key(key)
|
|
212
|
+
key_type = self._key_type(key)
|
|
213
|
+
|
|
214
|
+
if key_type == self.TYPE_LIST:
|
|
215
|
+
return self._redis.llen(key) # type: ignore
|
|
216
|
+
if key_type == self.TYPE_STRING:
|
|
217
|
+
return 1
|
|
218
|
+
return 0
|
|
219
|
+
except RedisConnectionError:
|
|
220
|
+
logger.error("Redis version: Failed - no connection")
|
|
221
|
+
return self._cache_verisons.get(key)
|
|
222
|
+
|
|
223
|
+
def _key_fetch(self, key: str) -> str | None:
|
|
224
|
+
try:
|
|
225
|
+
key = self._build_key(key)
|
|
226
|
+
key_type = self._key_type(key)
|
|
227
|
+
value = None
|
|
228
|
+
|
|
229
|
+
if key_type == self.TYPE_LIST:
|
|
230
|
+
value = self._redis.lindex(key, -1)
|
|
231
|
+
if key_type == self.TYPE_STRING:
|
|
232
|
+
value = self._redis.get(key)
|
|
233
|
+
|
|
234
|
+
if value is not None:
|
|
235
|
+
return value.decode() # type: ignore
|
|
236
|
+
|
|
237
|
+
return value
|
|
238
|
+
except RedisConnectionError:
|
|
239
|
+
logger.error("Redis fetch: Failed - no connection")
|
|
240
|
+
return self._cache.get(key)
|
|
241
|
+
|
|
242
|
+
def _key_update(self, key: str, value: T) -> Any:
|
|
243
|
+
key = self._build_key(key)
|
|
244
|
+
key_type = self._key_type(key)
|
|
245
|
+
value_new = json.dumps(asdict(value))
|
|
246
|
+
|
|
247
|
+
try:
|
|
248
|
+
if value_new == json.dumps(self._key_fetch(key)):
|
|
249
|
+
logger.info("Redis update: Identical, skiping change...")
|
|
250
|
+
return value_new
|
|
251
|
+
if key_type in [self.TYPE_LIST, self.TYPE_NONE]:
|
|
252
|
+
self._redis.rpush(key, value_new)
|
|
253
|
+
elif key_type == self.TYPE_STRING:
|
|
254
|
+
value_old = self._redis.get(key)
|
|
255
|
+
self._redis.delete(key)
|
|
256
|
+
self._redis.rpush(key, value_old, value_new) # type: ignore
|
|
257
|
+
|
|
258
|
+
return value_new
|
|
259
|
+
except RedisConnectionError:
|
|
260
|
+
logger.error("Redis update: Failed - no connection")
|
|
261
|
+
return self._cache[key]
|
|
262
|
+
|
|
263
|
+
def _key_delete(self, key: str) -> None:
|
|
264
|
+
self._redis.delete(self._build_key(key))
|
|
265
|
+
|
|
266
|
+
def _key_type(self, key: str) -> str | None:
|
|
267
|
+
try:
|
|
268
|
+
key_type = self._redis.type(key)
|
|
269
|
+
if key_type is not None:
|
|
270
|
+
return str(key_type.decode()) # type: ignore
|
|
271
|
+
return key_type
|
|
272
|
+
except RedisConnectionError:
|
|
273
|
+
return None
|
|
274
|
+
|
|
275
|
+
def _build_key(self, key: str) -> str:
|
|
276
|
+
return f"{self._prefix}{key}"
|
|
277
|
+
|
|
278
|
+
def _watcher(self) -> None:
|
|
279
|
+
# This thread seems unecessary, but without it messages aren't updated.
|
|
280
|
+
# Even if we're not receiving any 'pmessage' messages (???)
|
|
281
|
+
while True:
|
|
282
|
+
for message in self._pubsub.listen():
|
|
283
|
+
logger.info(
|
|
284
|
+
"Redis watcher: Received event: %s - %s",
|
|
285
|
+
message["channel"],
|
|
286
|
+
message["data"],
|
|
287
|
+
)
|
|
288
|
+
if message["type"] == "pmessage":
|
|
289
|
+
event_db, event_key, event_type = self._decode_event(message)
|
|
290
|
+
if event_db is None or event_key is None:
|
|
291
|
+
continue
|
|
292
|
+
int_event_db = int(event_db)
|
|
293
|
+
str_event_key = str(event_key)
|
|
294
|
+
key_value = self._key_fetch(str_event_key) or ""
|
|
295
|
+
|
|
296
|
+
self._on_key_event(
|
|
297
|
+
int_event_db, str_event_key, event_type, key_value
|
|
298
|
+
)
|
|
299
|
+
time.sleep(1)
|
|
300
|
+
logger.info("Redis watcher is shutting down...")
|
|
301
|
+
|
|
302
|
+
def _decode_event(
|
|
303
|
+
self, message: dict[str, bytes]
|
|
304
|
+
) -> tuple[int | None, str | None, str]:
|
|
305
|
+
channel = message["channel"].decode()
|
|
306
|
+
|
|
307
|
+
match = re.match(KEY_EVENT_REGEX, channel)
|
|
308
|
+
if not match:
|
|
309
|
+
return None, None, message["data"].decode()
|
|
310
|
+
|
|
311
|
+
event_key = match.group("key")
|
|
312
|
+
event_db = int(match.group("db"))
|
|
313
|
+
event_type = message["data"].decode()
|
|
314
|
+
return event_db, event_key, event_type
|
|
315
|
+
|
|
316
|
+
def _on_key_event(self, db: int, key: str, event: str, value: str) -> None:
|
|
317
|
+
if db != self._db:
|
|
318
|
+
return
|
|
319
|
+
|
|
320
|
+
if event in [self.EVENT_SET, self.EVENT_RPUSH, self.EVENT_LSET]:
|
|
321
|
+
self._on_key_update(key, value)
|
|
322
|
+
elif event == self.EVENT_DELETE:
|
|
323
|
+
self._on_key_delete(key)
|
|
324
|
+
else:
|
|
325
|
+
logger.info(f"Redis _on_key_event: Ignoring operation: {event}")
|
|
326
|
+
|
|
327
|
+
def _on_key_update(self, key: str, value: str) -> None:
|
|
328
|
+
logger.info(f"Redis: Updating local cache: {key}")
|
|
329
|
+
self._cache[key] = value
|
|
330
|
+
self._cache_verisons[key] = self._key_versions(key) or -1
|
|
331
|
+
|
|
332
|
+
def _on_key_delete(self, key: str) -> None:
|
|
333
|
+
del self._cache[key]
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
# mypy: disable-error-code=no-untyped-def
|
|
2
|
+
import json
|
|
3
|
+
from collections.abc import AsyncGenerator
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from enum import Enum
|
|
6
|
+
|
|
7
|
+
import pytest
|
|
8
|
+
import pytest_asyncio
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
from testcontainers.redis import RedisContainer
|
|
11
|
+
|
|
12
|
+
from cledar.redis import AsyncRedisService, FailedValue, RedisServiceConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class UserModel(BaseModel):
    """Minimal Pydantic model used as a round-trip serialization fixture."""

    # Numeric identifier used in equality checks after get/mget.
    user_id: int
    # Required field; omitted in the "bad_validation" mget fixture below.
    name: str
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Color(Enum):
    """Plain Enum fixture used to exercise the service's custom JSON encoder."""

    RED = 1
    BLUE = 2
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@pytest.fixture(scope="module")
def redis_container():
    """Provide a single Redis 7.2 container shared by all tests in the module."""
    with RedisContainer("redis:7.2-alpine") as container:
        yield container
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@pytest_asyncio.fixture(scope="function")
async def async_redis_service(
    redis_container: RedisContainer,
) -> AsyncGenerator[AsyncRedisService, None]:
    """Yield a connected AsyncRedisService bound to the test container."""
    config = RedisServiceConfig(
        redis_host=redis_container.get_container_host_ip(),
        redis_port=int(redis_container.get_exposed_port(6379)),
        redis_db=0,
    )
    service = AsyncRedisService(config)
    await service.connect()
    yield service
    await service.close()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
@pytest.mark.asyncio
async def test_is_alive(async_redis_service: AsyncRedisService) -> None:
    """A freshly connected service must report itself alive."""
    alive = await async_redis_service.is_alive()
    assert alive is True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@pytest.mark.asyncio
async def test_set_and_get_pydantic_model(
    async_redis_service: AsyncRedisService,
) -> None:
    """A Pydantic model stored with set() round-trips intact through get()."""
    key = "async:user:1"
    expected = UserModel(user_id=1, name="Alice")

    stored = await async_redis_service.set(key, expected)
    assert stored is True

    loaded = await async_redis_service.get(key, UserModel)
    assert isinstance(loaded, UserModel)
    assert loaded == expected
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@pytest.mark.asyncio
async def test_set_plain_string_and_get_raw(
    async_redis_service: AsyncRedisService,
) -> None:
    """Plain strings are stored verbatim and readable via get_raw()."""
    key = "async:greeting"

    stored = await async_redis_service.set(key, "hello")
    assert stored is True

    raw = await async_redis_service.get_raw(key)
    assert raw == "hello"
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
@pytest.mark.asyncio
async def test_set_with_enum_and_datetime_uses_custom_encoder(
    async_redis_service: AsyncRedisService,
) -> None:
    """Enums and datetimes are JSON-encoded by the service's custom encoder."""
    key = "async:meta"
    timestamp = datetime(2024, 1, 2, 3, 4, 5)

    stored = await async_redis_service.set(key, {"color": Color.RED, "when": timestamp})
    assert stored is True

    raw = await async_redis_service.get_raw(key)
    decoded = json.loads(raw)  # type: ignore
    assert decoded["color"] == "red"
    assert decoded["when"] == timestamp.isoformat()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@pytest.mark.asyncio
async def test_list_keys(async_redis_service: AsyncRedisService) -> None:
    """list_keys() with a glob pattern returns every key under the prefix."""
    prefix = "async:listkeys:test:"
    expected = [f"{prefix}{i}" for i in range(3)]

    for key in expected:
        assert await async_redis_service.set(key, {"i": 1}) is True

    listed = await async_redis_service.list_keys(f"{prefix}*")
    assert all(key in listed for key in expected)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
@pytest.mark.asyncio
async def test_mget_mixed_results(async_redis_service: AsyncRedisService) -> None:
    """mget() yields a model, FailedValue for bad entries, and None for misses."""
    valid_user = UserModel(user_id=2, name="Bob")
    key_ok = "async:mget:ok"
    key_not_json = "async:mget:not_json"
    key_bad_validation = "async:mget:bad_validation"
    key_missing = "async:mget:none"

    assert await async_redis_service.set(key_ok, valid_user) is True
    assert await async_redis_service.set(key_not_json, "{not-json}") is True
    # Valid JSON but missing the required "name" field.
    assert await async_redis_service.set(key_bad_validation, json.dumps({"user_id": 3})) is True

    parsed, broken_json, broken_schema, missing = await async_redis_service.mget(
        [key_ok, key_not_json, key_bad_validation, key_missing], UserModel
    )

    assert isinstance(parsed, UserModel)
    assert isinstance(broken_json, FailedValue)
    assert isinstance(broken_schema, FailedValue)
    assert missing is None
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@pytest.mark.asyncio
async def test_delete(async_redis_service: AsyncRedisService) -> None:
    """delete() removes a key so subsequent reads return None."""
    key = "async:delete:test"

    assert await async_redis_service.set(key, {"x": 1}) is True
    assert await async_redis_service.delete(key) is True

    after_delete = await async_redis_service.get_raw(key)
    assert after_delete is None
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@pytest.mark.asyncio
async def test_context_manager_pattern(redis_container: RedisContainer) -> None:
    """Test that service can be used with proper async context management."""
    config = RedisServiceConfig(
        redis_host=redis_container.get_container_host_ip(),
        redis_port=int(redis_container.get_exposed_port(6379)),
        redis_db=0,
    )
    service = AsyncRedisService(config)

    try:
        await service.connect()
        assert await service.is_alive() is True
        await service.set("test:key", "test:value")
        round_tripped = await service.get_raw("test:key")
        assert round_tripped == "test:value"
    finally:
        # Always release the connection, even if an assertion fails.
        await service.close()
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
@pytest.mark.asyncio
async def test_concurrent_operations(async_redis_service: AsyncRedisService) -> None:
    """Ten interleaved set/get round-trips all resolve to their own values."""
    import asyncio

    async def roundtrip(key: str, value: str) -> str | None:
        await async_redis_service.set(key, value)
        return await async_redis_service.get_raw(key)

    expected = [f"value:{i}" for i in range(10)]
    tasks = [
        roundtrip(f"async:concurrent:{i}", value) for i, value in enumerate(expected)
    ]
    results = await asyncio.gather(*tasks)

    assert list(results) == expected
|