cledar-sdk 2.0.1__py3-none-any.whl → 2.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cledar/__init__.py +0 -0
- cledar/kafka/README.md +239 -0
- cledar/kafka/__init__.py +40 -0
- cledar/kafka/clients/base.py +98 -0
- cledar/kafka/clients/consumer.py +110 -0
- cledar/kafka/clients/producer.py +80 -0
- cledar/kafka/config/schemas.py +178 -0
- cledar/kafka/exceptions.py +22 -0
- cledar/kafka/handlers/dead_letter.py +82 -0
- cledar/kafka/handlers/parser.py +49 -0
- cledar/kafka/logger.py +3 -0
- cledar/kafka/models/input.py +13 -0
- cledar/kafka/models/message.py +10 -0
- cledar/kafka/models/output.py +8 -0
- cledar/kafka/tests/.env.test.kafka +3 -0
- cledar/kafka/tests/README.md +216 -0
- cledar/kafka/tests/conftest.py +104 -0
- cledar/kafka/tests/integration/__init__.py +1 -0
- cledar/kafka/tests/integration/conftest.py +78 -0
- cledar/kafka/tests/integration/helpers.py +47 -0
- cledar/kafka/tests/integration/test_consumer_integration.py +375 -0
- cledar/kafka/tests/integration/test_integration.py +394 -0
- cledar/kafka/tests/integration/test_producer_consumer_interaction.py +388 -0
- cledar/kafka/tests/integration/test_producer_integration.py +217 -0
- cledar/kafka/tests/unit/__init__.py +1 -0
- cledar/kafka/tests/unit/test_base_kafka_client.py +391 -0
- cledar/kafka/tests/unit/test_config_validation.py +609 -0
- cledar/kafka/tests/unit/test_dead_letter_handler.py +443 -0
- cledar/kafka/tests/unit/test_error_handling.py +674 -0
- cledar/kafka/tests/unit/test_input_parser.py +310 -0
- cledar/kafka/tests/unit/test_input_parser_comprehensive.py +489 -0
- cledar/kafka/tests/unit/test_utils.py +25 -0
- cledar/kafka/tests/unit/test_utils_comprehensive.py +408 -0
- cledar/kafka/utils/callbacks.py +19 -0
- cledar/kafka/utils/messages.py +28 -0
- cledar/kafka/utils/topics.py +2 -0
- cledar/kserve/README.md +352 -0
- cledar/kserve/__init__.py +3 -0
- cledar/kserve/tests/__init__.py +0 -0
- cledar/kserve/tests/test_utils.py +64 -0
- cledar/kserve/utils.py +27 -0
- cledar/logging/README.md +53 -0
- cledar/logging/__init__.py +3 -0
- cledar/logging/tests/test_universal_plaintext_formatter.py +249 -0
- cledar/logging/universal_plaintext_formatter.py +94 -0
- cledar/monitoring/README.md +71 -0
- cledar/monitoring/__init__.py +3 -0
- cledar/monitoring/monitoring_server.py +112 -0
- cledar/monitoring/tests/integration/test_monitoring_server_int.py +162 -0
- cledar/monitoring/tests/test_monitoring_server.py +59 -0
- cledar/nonce/README.md +99 -0
- cledar/nonce/__init__.py +3 -0
- cledar/nonce/nonce_service.py +36 -0
- cledar/nonce/tests/__init__.py +0 -0
- cledar/nonce/tests/test_nonce_service.py +136 -0
- cledar/redis/README.md +536 -0
- cledar/redis/__init__.py +15 -0
- cledar/redis/async_example.py +111 -0
- cledar/redis/example.py +37 -0
- cledar/redis/exceptions.py +22 -0
- cledar/redis/logger.py +3 -0
- cledar/redis/model.py +10 -0
- cledar/redis/redis.py +525 -0
- cledar/redis/redis_config_store.py +252 -0
- cledar/redis/tests/test_async_integration_redis.py +158 -0
- cledar/redis/tests/test_async_redis_service.py +380 -0
- cledar/redis/tests/test_integration_redis.py +119 -0
- cledar/redis/tests/test_redis_service.py +319 -0
- cledar/storage/README.md +529 -0
- cledar/storage/__init__.py +4 -0
- cledar/storage/constants.py +3 -0
- cledar/storage/exceptions.py +50 -0
- cledar/storage/models.py +19 -0
- cledar/storage/object_storage.py +955 -0
- cledar/storage/tests/conftest.py +18 -0
- cledar/storage/tests/test_abfs.py +164 -0
- cledar/storage/tests/test_integration_filesystem.py +359 -0
- cledar/storage/tests/test_integration_s3.py +453 -0
- cledar/storage/tests/test_local.py +384 -0
- cledar/storage/tests/test_s3.py +521 -0
- {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/METADATA +1 -1
- cledar_sdk-2.0.3.dist-info/RECORD +84 -0
- cledar_sdk-2.0.1.dist-info/RECORD +0 -4
- {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/WHEEL +0 -0
- {cledar_sdk-2.0.1.dist-info → cledar_sdk-2.0.3.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import re
|
|
3
|
+
import time
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
from dataclasses import asdict
|
|
6
|
+
from threading import Thread
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from redis import ConnectionError as RedisConnectionError
|
|
10
|
+
from redis import Redis
|
|
11
|
+
|
|
12
|
+
from .logger import logger
|
|
13
|
+
from .model import ConfigAbstract as T
|
|
14
|
+
|
|
15
|
+
# Channel template for Redis *keyspace* notifications: events touching a
# specific key are published on "__keyspace@<db>__:<key>".
KEY_EVENT_FORMAT = "__keyspace@{DB}__:{KEY}"
# Parses a keyspace-notification channel back into its db index and full key.
KEY_EVENT_REGEX = r"__keyspace@(?P<db>\d+)__:(?P<key>.+)"

# Channel template for Redis *keyevent* notifications ("__keyevent@<db>__:<op>").
# NOTE(review): appears unused in this module — confirm before removing.
OP_EVENT_FORMAT = "__keyevent@{DB}__:{OPERATION}"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class RedisConfigStore:
    """Write-through config store backed by Redis with a local in-memory cache.

    Values are dataclasses (``ConfigAbstract`` subtypes) serialized to JSON.
    Each logical key is stored as a Redis list whose last element is the
    current value, so ``LLEN`` doubles as a version counter.  Redis keyspace
    notifications keep the local cache in sync via a background listener
    thread.  The local cache dictionaries are always indexed by the *raw*
    (unprefixed) key; the configured prefix is applied only when talking to
    Redis.
    """

    # Redis value types as reported by the TYPE command.
    TYPE_NONE = "none"
    TYPE_LIST = "list"
    TYPE_STRING = "string"

    # Keyspace-notification event names this store reacts to.
    EVENT_DELETE = "del"
    EVENT_SET = "set"
    EVENT_RPUSH = "rpush"
    EVENT_LSET = "lset"

    def __init__(self, redis: Redis, prefix: str | None = None) -> None:
        """Wrap *redis* and start the background notification listener.

        Args:
            redis: Connected client.  Keyspace notifications must be enabled
                server-side (``notify-keyspace-events``) for watching to work
                — TODO confirm deployment config.
            prefix: Optional namespace prepended to every Redis key.
        """
        self._redis: Redis = redis
        self._pubsub = redis.pubsub()  # type: ignore
        self._db: int = redis.connection_pool.connection_kwargs.get("db")
        self._prefix: str = prefix or ""
        # key -> serialized JSON payload (raw keys, no prefix).
        self._cache: dict[str, str] = {}
        # NOTE: attribute keeps its historical misspelling ("verisons") so
        # external code that introspects it keeps working.
        self._cache_verisons: dict[str, int] = {}
        # key -> registered callbacks; a ``None`` entry means "use the
        # default cache-sync handler".
        self._monitoring: dict[
            str, list[Callable[[int, str, str, str], None] | None]
        ] = {}
        # Daemon thread: the listen loop below never terminates, so a
        # non-daemon thread would keep the interpreter alive on shutdown.
        self._watcher_thread: Thread = Thread(target=self._watcher, daemon=True)
        self._watcher_thread.start()

    def is_ready(self) -> bool:
        """Return True when Redis answers a PING, False when unreachable."""
        try:
            return self._redis.ping()  # type: ignore
        except RedisConnectionError:
            return False

    def versions(self, key: str) -> int | None:
        """Return the current version (history length) of *key* from Redis."""
        return self._key_versions(key)

    def cached_version(self, key: str) -> int | None:
        """Return the locally cached version of *key*, if any."""
        return self._cache_verisons.get(key)

    def fetch(self, cls: type[T], key: str) -> T | None:
        """Fetch *key*, deserialize it into *cls*, and start watching it.

        Returns None when the key does not exist.  Subsequent calls are
        served from the local cache, which the watcher keeps up to date.
        """
        if key not in self._cache:
            new_value = self._key_fetch(key)
            if new_value is None:
                return None
            self._cache[key] = new_value
            self._cache_verisons[key] = self._key_versions(key) or -1
            self._key_watch(key)
        return cls(**json.loads(self._cache[key]))

    def update(self, key: str, value: T) -> None:
        """Serialize *value*, push it to Redis, and refresh the local cache."""
        self._cache[key] = self._key_update(key, value)
        self._cache_verisons[key] = self._key_versions(key) or -1
        self._key_watch(key)

    def delete(self, key: str) -> None:
        """Remove *key* from Redis and from the local caches."""
        if key in self._cache:
            del self._cache[key]
            del self._cache_verisons[key]
        self._key_delete(key)
        self._key_watch(key)

    def watch(
        self, key: str, callback: Callable[[int, str, str, str], None] | None = None
    ) -> None:
        """Register *callback* (db, key, event, value) for changes to *key*.

        With ``callback=None`` only the default cache-sync handler runs.
        """
        self._key_watch(key, callback)

    def __setitem__(self, key: str, value: T) -> None:
        self.update(key, value)

    def __delitem__(self, key: str) -> None:
        self.delete(key)

    def _key_watch(
        self, key: str, callback: Callable[[int, str, str, str], None] | None = None
    ) -> None:
        """Subscribe to keyspace events for *key*, wiring up *callback*."""
        if key not in self._monitoring:
            self._monitoring[key] = []

        # Idempotent: re-registering the same callback is a no-op.
        if callback in self._monitoring[key]:
            return

        self._monitoring[key].append(callback)
        # Snapshot the callback list; each new registration re-subscribes
        # below, replacing the channel handler with a fresh closure.
        callbacks = list(self._monitoring[key])
        event_key = KEY_EVENT_FORMAT.format(DB=self._db, KEY=self._build_key(key))

        def callback_wrapper(message: dict[str, bytes]) -> None:
            # _decode_event returns the raw (unprefixed) key, suitable both
            # for _key_fetch and for the cache dictionaries.
            event_db, event_key, event_type = self._decode_event(message)
            if event_db is None or event_key is None:
                return
            int_event_db = int(event_db)
            str_event_key = str(event_key)
            logger.info(
                "Redis: Handling `%s` for key `%s` (in db %d)...",
                event_type,
                event_key,
                event_db,
            )
            key_value: str = self._key_fetch(str_event_key) or ""
            # NOTE: Reimplement to non-blocking and parallel,
            # instead of sequencial and blocking
            # NOTE: pubsub requires synchronous callbacks
            for callback in callbacks:
                if callback is None:
                    # None marks the built-in cache-sync handler.
                    self._on_key_event(
                        int_event_db, str_event_key, event_type, key_value
                    )
                else:
                    callback(int_event_db, str_event_key, event_type, key_value)

        self._pubsub.psubscribe(**{event_key: callback_wrapper})

    def _key_versions(self, key: str) -> int | None:
        """Return the version of *key*: list length, 1 for strings, else 0.

        Falls back to the cached version when Redis is unreachable.
        """
        try:
            # Keep the raw key intact: the connection-error fallback below
            # must index the cache with the unprefixed key.
            full_key = self._build_key(key)
            key_type = self._key_type(full_key)

            if key_type == self.TYPE_LIST:
                return self._redis.llen(full_key)  # type: ignore
            if key_type == self.TYPE_STRING:
                return 1
            return 0
        except RedisConnectionError:
            logger.error("Redis version: Failed - no connection")
            return self._cache_verisons.get(key)

    def _key_fetch(self, key: str) -> str | None:
        """Return the current serialized value of *key*, or None if absent.

        For list-typed keys the newest (last) element is the current value.
        Falls back to the cached value when Redis is unreachable.
        """
        try:
            # Raw key preserved for the cache fallback in the except branch.
            full_key = self._build_key(key)
            key_type = self._key_type(full_key)
            value = None

            if key_type == self.TYPE_LIST:
                value = self._redis.lindex(full_key, -1)
            if key_type == self.TYPE_STRING:
                value = self._redis.get(full_key)

            if value is not None:
                return value.decode()  # type: ignore

            return value
        except RedisConnectionError:
            logger.error("Redis fetch: Failed - no connection")
            return self._cache.get(key)

    def _key_update(self, key: str, value: T) -> Any:
        """Serialize *value* and append it to the key's history list.

        Returns the serialized JSON payload (or the cached payload when the
        connection is down).
        """
        value_new = json.dumps(asdict(value))

        try:
            # _key_fetch takes the *raw* key and already returns the
            # serialized JSON string, so compare directly — no extra
            # json.dumps pass and no double-prefixing.
            if value_new == self._key_fetch(key):
                logger.info("Redis update: Identical, skiping change...")
                return value_new

            full_key = self._build_key(key)
            key_type = self._key_type(full_key)
            if key_type in [self.TYPE_LIST, self.TYPE_NONE]:
                self._redis.rpush(full_key, value_new)
            elif key_type == self.TYPE_STRING:
                # Migrate a legacy plain-string entry to the list layout,
                # keeping the old value as history.
                value_old = self._redis.get(full_key)
                self._redis.delete(full_key)
                self._redis.rpush(full_key, value_old, value_new)  # type: ignore

            return value_new
        except RedisConnectionError:
            logger.error("Redis update: Failed - no connection")
            return self._cache[key]

    def _key_delete(self, key: str) -> None:
        """Delete the prefixed key from Redis."""
        self._redis.delete(self._build_key(key))

    def _key_type(self, key: str) -> str | None:
        """Return the Redis TYPE of *key* (already prefixed), or None."""
        try:
            key_type = self._redis.type(key)
            if key_type is not None:
                return str(key_type.decode())  # type: ignore
            return key_type
        except RedisConnectionError:
            return None

    def _build_key(self, key: str) -> str:
        """Prepend the configured namespace prefix to *key*."""
        return f"{self._prefix}{key}"

    def _watcher(self) -> None:
        """Background loop pumping pub/sub messages into the event handlers."""
        # This thread seems unecessary, but without it messages aren't updated.
        # Even if we're not receiving any 'pmessage' messages (???)
        while True:
            for message in self._pubsub.listen():
                logger.info(
                    "Redis watcher: Received event: %s - %s",
                    message["channel"],
                    message["data"],
                )
                if message["type"] == "pmessage":
                    event_db, event_key, event_type = self._decode_event(message)
                    if event_db is None or event_key is None:
                        continue
                    int_event_db = int(event_db)
                    str_event_key = str(event_key)
                    key_value = self._key_fetch(str_event_key) or ""

                    self._on_key_event(
                        int_event_db, str_event_key, event_type, key_value
                    )
            time.sleep(1)
        # NOTE(review): unreachable — the loop above never breaks; the thread
        # is a daemon so it dies with the interpreter instead.
        logger.info("Redis watcher is shutting down...")

    def _decode_event(
        self, message: dict[str, bytes]
    ) -> tuple[int | None, str | None, str]:
        """Split a pub/sub message into ``(db, raw_key, event_name)``.

        Returns ``(None, None, data)`` when the channel is not a keyspace
        notification.  The configured prefix is stripped so the returned key
        matches the keys used by the local caches and ``_key_fetch``.
        """
        channel = message["channel"].decode()

        match = re.match(KEY_EVENT_REGEX, channel)
        if not match:
            return None, None, message["data"].decode()

        # The channel carries the full Redis key (prefix included); strip the
        # prefix so cache lookups and callbacks see the logical key.
        event_key = match.group("key").removeprefix(self._prefix)
        event_db = int(match.group("db"))
        event_type = message["data"].decode()
        return event_db, event_key, event_type

    def _on_key_event(self, db: int, key: str, event: str, value: str) -> None:
        """Default handler: route write events to cache update, del to purge."""
        # Ignore events from other logical databases.
        if db != self._db:
            return

        if event in [self.EVENT_SET, self.EVENT_RPUSH, self.EVENT_LSET]:
            self._on_key_update(key, value)
        elif event == self.EVENT_DELETE:
            self._on_key_delete(key)
        else:
            logger.info(f"Redis _on_key_event: Ignoring operation: {event}")

    def _on_key_update(self, key: str, value: str) -> None:
        """Refresh the local cache entry and its version for *key*."""
        logger.info(f"Redis: Updating local cache: {key}")
        self._cache[key] = value
        self._cache_verisons[key] = self._key_versions(key) or -1

    def _on_key_delete(self, key: str) -> None:
        """Drop *key* from both local caches; tolerate already-absent keys."""
        self._cache.pop(key, None)
        self._cache_verisons.pop(key, None)
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
# mypy: disable-error-code=no-untyped-def
|
|
2
|
+
import json
|
|
3
|
+
from collections.abc import AsyncGenerator
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from enum import Enum
|
|
6
|
+
|
|
7
|
+
import pytest
|
|
8
|
+
import pytest_asyncio
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
from testcontainers.redis import RedisContainer
|
|
11
|
+
|
|
12
|
+
from cledar.redis import AsyncRedisService, FailedValue, RedisServiceConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class UserModel(BaseModel):
    """Minimal pydantic model used as the round-trip payload in these tests."""

    user_id: int
    name: str
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Color(Enum):
    """Sample enum used to exercise custom JSON encoding of enum values."""

    RED = 1
    BLUE = 2
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@pytest.fixture(scope="module")
def redis_container():
    """Spin up a throwaway Redis 7.2 container shared by the whole module."""
    container = RedisContainer("redis:7.2-alpine")
    with container as running:
        yield running
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@pytest_asyncio.fixture(scope="function")
async def async_redis_service(
    redis_container: RedisContainer,
) -> AsyncGenerator[AsyncRedisService, None]:
    """Yield a connected AsyncRedisService bound to the test container.

    The service is closed in a ``finally`` block so the connection is
    released even when the test body raises — the previous version leaked
    the connection on test failure.
    """
    host = redis_container.get_container_host_ip()
    port = int(redis_container.get_exposed_port(6379))

    config = RedisServiceConfig(redis_host=host, redis_port=port, redis_db=0)
    service = AsyncRedisService(config)
    await service.connect()
    try:
        yield service
    finally:
        await service.close()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
@pytest.mark.asyncio
async def test_is_alive(async_redis_service: AsyncRedisService) -> None:
    """A freshly connected service reports itself alive."""
    alive = await async_redis_service.is_alive()
    assert alive is True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@pytest.mark.asyncio
async def test_set_and_get_pydantic_model(
    async_redis_service: AsyncRedisService,
) -> None:
    """A pydantic model survives a set/get round trip unchanged."""
    stored = UserModel(user_id=1, name="Alice")
    assert await async_redis_service.set("async:user:1", stored) is True

    loaded = await async_redis_service.get("async:user:1", UserModel)
    assert isinstance(loaded, UserModel)
    assert loaded == stored
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@pytest.mark.asyncio
async def test_set_plain_string_and_get_raw(
    async_redis_service: AsyncRedisService,
) -> None:
    """Plain strings are stored verbatim and readable via get_raw."""
    was_set = await async_redis_service.set("async:greeting", "hello")
    assert was_set is True
    assert await async_redis_service.get_raw("async:greeting") == "hello"
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
@pytest.mark.asyncio
async def test_set_with_enum_and_datetime_uses_custom_encoder(
    async_redis_service: AsyncRedisService,
) -> None:
    """Enum and datetime values are serialized by the service's JSON encoder."""
    key = "async:meta"
    when = datetime(2024, 1, 2, 3, 4, 5)
    assert await async_redis_service.set(key, {"color": Color.RED, "when": when}) is True

    stored = await async_redis_service.get_raw(key)
    decoded = json.loads(stored)  # type: ignore
    assert decoded["color"] == "red"
    assert decoded["when"] == when.isoformat()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@pytest.mark.asyncio
async def test_list_keys(async_redis_service: AsyncRedisService) -> None:
    """list_keys returns every key matching a glob pattern."""
    prefix = "async:listkeys:test:"
    expected = [f"{prefix}{i}" for i in range(3)]
    for key in expected:
        assert await async_redis_service.set(key, {"i": 1}) is True

    found = await async_redis_service.list_keys(f"{prefix}*")
    for key in expected:
        assert key in found
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
@pytest.mark.asyncio
async def test_mget_mixed_results(async_redis_service: AsyncRedisService) -> None:
    """mget yields parsed models, FailedValue on bad data, None on misses."""
    valid_key = "async:mget:ok"
    not_json_key = "async:mget:not_json"
    invalid_key = "async:mget:bad_validation"
    missing_key = "async:mget:none"

    assert await async_redis_service.set(valid_key, UserModel(user_id=2, name="Bob")) is True
    assert await async_redis_service.set(not_json_key, "{not-json}") is True
    assert await async_redis_service.set(invalid_key, json.dumps({"user_id": 3})) is True

    parsed = await async_redis_service.mget(
        [valid_key, not_json_key, invalid_key, missing_key], UserModel
    )

    assert isinstance(parsed[0], UserModel)
    assert isinstance(parsed[1], FailedValue)
    assert isinstance(parsed[2], FailedValue)
    assert parsed[3] is None
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@pytest.mark.asyncio
async def test_delete(async_redis_service: AsyncRedisService) -> None:
    """Deleting a key removes it so a subsequent read returns None."""
    target = "async:delete:test"
    assert await async_redis_service.set(target, {"x": 1}) is True
    assert await async_redis_service.delete(target) is True
    assert await async_redis_service.get_raw(target) is None
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@pytest.mark.asyncio
async def test_context_manager_pattern(redis_container: RedisContainer) -> None:
    """A manually managed connect/close lifecycle works end to end."""
    config = RedisServiceConfig(
        redis_host=redis_container.get_container_host_ip(),
        redis_port=int(redis_container.get_exposed_port(6379)),
        redis_db=0,
    )
    service = AsyncRedisService(config)

    try:
        await service.connect()
        assert await service.is_alive() is True
        await service.set("test:key", "test:value")
        assert await service.get_raw("test:key") == "test:value"
    finally:
        await service.close()
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
@pytest.mark.asyncio
async def test_concurrent_operations(async_redis_service: AsyncRedisService) -> None:
    """Many interleaved set/get round trips all complete with correct values."""
    import asyncio

    async def roundtrip(key: str, value: str) -> str | None:
        await async_redis_service.set(key, value)
        return await async_redis_service.get_raw(key)

    pending = [roundtrip(f"async:concurrent:{i}", f"value:{i}") for i in range(10)]
    outcomes = await asyncio.gather(*pending)

    assert outcomes == [f"value:{i}" for i in range(10)]
|