async-redis-client 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,52 @@
1
"""
Ports-and-adapters cache library: Redis-backed adapters with Fernet + Pydantic JSON.

Application code should depend on :class:`~async_redis_client.ports.sync_cache_port.CacheSyncPort`
or :class:`~async_redis_client.ports.async_cache_port.CacheAsyncPort` only; compose Redis adapters
in your bootstrap layer.
"""

from importlib.metadata import PackageNotFoundError, version

# Resolve the installed distribution's version; fall back to a PEP 440 local
# version tag when the package is imported from a checkout that was never
# installed (importlib.metadata cannot locate the distribution then).
try:
    __version__ = version("async-redis-client")
except PackageNotFoundError:
    __version__ = "0.0.0+unknown"

# Public re-exports: in-memory and Redis adapters, the error hierarchy, and
# the two cache port protocols.
from async_redis_client.adapters.memory import (
    MemoryCacheAsyncAdapter,
    MemoryCacheSyncAdapter,
)
from async_redis_client.adapters.redis import (
    RedisCacheAsyncAdapter,
    RedisCacheSyncAdapter,
)
from async_redis_client.errors import (
    CacheClosedError,
    CacheError,
    CacheKeyNotFoundError,
    DecryptionError,
    SerializationError,
)
from async_redis_client.ports.async_cache_port import CacheAsyncPort
from async_redis_client.ports.sync_cache_port import CacheSyncPort

# Convenience aliases so both the "SyncCachePort" and "CacheSyncPort"
# spellings resolve to the same protocol objects.
SyncCachePort = CacheSyncPort
AsyncCachePort = CacheAsyncPort

__all__ = [
    "__version__",
    "AsyncCachePort",
    "CacheAsyncPort",
    "CacheClosedError",
    "CacheError",
    "CacheKeyNotFoundError",
    "CacheSyncPort",
    "DecryptionError",
    "MemoryCacheAsyncAdapter",
    "MemoryCacheSyncAdapter",
    "RedisCacheAsyncAdapter",
    "RedisCacheSyncAdapter",
    "SerializationError",
    "SyncCachePort",
]
@@ -0,0 +1 @@
1
+ """Infrastructure adapters (Redis, optional in-memory stubs)."""
@@ -0,0 +1,4 @@
1
"""Public re-exports for the in-memory cache adapters (test doubles without Redis)."""

from async_redis_client.adapters.memory.async_adapter import MemoryCacheAsyncAdapter
from async_redis_client.adapters.memory.sync_adapter import MemoryCacheSyncAdapter

__all__ = ["MemoryCacheAsyncAdapter", "MemoryCacheSyncAdapter"]
@@ -0,0 +1,105 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ from collections.abc import Mapping, Sequence
5
+ from typing import Self, TypeVar
6
+
7
+ from pydantic import BaseModel, JsonValue
8
+
9
+ from async_redis_client.adapters.memory.sync_adapter import MemoryCacheSyncAdapter
10
+
11
+ TModel = TypeVar("TModel")
12
+
13
+
14
class MemoryCacheAsyncAdapter:
    """Asyncio façade over :class:`MemoryCacheSyncAdapter`.

    Every operation acquires a single :class:`asyncio.Lock` before delegating
    to the synchronous in-memory adapter, so concurrent coroutines observe a
    consistent store. As in the sync adapter, TTL arguments are accepted but
    not simulated.
    """

    __slots__ = ("_inner", "_lock")

    def __init__(
        self,
        *,
        namespace: str = "",
        key_prefix: str = "",
    ) -> None:
        self._inner = MemoryCacheSyncAdapter(namespace=namespace, key_prefix=key_prefix)
        self._lock = asyncio.Lock()

    async def close(self) -> None:
        """No-op; satisfies :class:`~async_redis_client.ports.async_cache_port.CacheAsyncPort`."""

    aclose = close

    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(self, exc_type: object, exc: object, tb: object) -> None:
        await self.close()

    async def get(self, key: str) -> JsonValue | None:
        """Return the JSON tree stored under *key*, or ``None`` on a miss."""
        async with self._lock:
            return self._inner.get(key)

    async def set(self, key: str, value: JsonValue, ttl_seconds: int | None = None) -> None:
        """Store *value* under *key*; ``ttl_seconds`` is forwarded to the delegate."""
        async with self._lock:
            self._inner.set(key, value, ttl_seconds=ttl_seconds)

    async def set_json(self, key: str, value: JsonValue, ttl_seconds: int | None = None) -> None:
        """JSON-explicit spelling of :meth:`set`; delegates to the sync adapter."""
        async with self._lock:
            self._inner.set_json(key, value, ttl_seconds=ttl_seconds)

    async def get_json(self, key: str) -> JsonValue | None:
        """JSON-explicit spelling of :meth:`get`; delegates to the sync adapter."""
        async with self._lock:
            return self._inner.get_json(key)

    async def get_or_raise_if_missing(self, key: str) -> JsonValue:
        """Like :meth:`get`, but a missing key raises instead of returning ``None``."""
        async with self._lock:
            return self._inner.get_or_raise_if_missing(key)

    async def get_json_or_raise_if_missing(self, key: str) -> JsonValue:
        """JSON-explicit spelling of :meth:`get_or_raise_if_missing`."""
        async with self._lock:
            return self._inner.get_json_or_raise_if_missing(key)

    async def set_model(self, key: str, model: BaseModel, ttl_seconds: int | None = None) -> None:
        """Store a Pydantic model instance under *key*."""
        async with self._lock:
            self._inner.set_model(key, model, ttl_seconds=ttl_seconds)

    async def get_as_model(self, key: str, model_type: type[TModel]) -> TModel | None:
        """Load the value under *key* validated as *model_type* (``None`` on miss)."""
        async with self._lock:
            return self._inner.get_as_model(key, model_type)

    async def delete(self, key: str) -> int:
        """Remove *key*; returns 1 when something was deleted, else 0."""
        async with self._lock:
            return self._inner.delete(key)

    async def exists(self, key: str) -> bool:
        """True when *key* currently holds any value."""
        async with self._lock:
            return self._inner.exists(key)

    async def incr(self, key: str, amount: int = 1) -> int:
        """Increment the counter at *key* by *amount* and return the new total."""
        async with self._lock:
            return self._inner.incr(key, amount)

    async def decr(self, key: str, amount: int = 1) -> int:
        """Decrement the counter at *key* by *amount* and return the new total."""
        async with self._lock:
            return self._inner.decr(key, amount)

    async def incrby(self, key: str, amount: int) -> int:
        """Increment by an explicit *amount* (no default)."""
        async with self._lock:
            return self._inner.incrby(key, amount)

    async def set_many(self, mapping: Mapping[str, JsonValue], ttl_seconds: int | None = None) -> None:
        """Store every ``key -> value`` pair from *mapping* in one lock hold."""
        async with self._lock:
            self._inner.set_many(mapping, ttl_seconds=ttl_seconds)

    async def get_many(self, keys: Sequence[str]) -> dict[str, JsonValue | None]:
        """Fetch all *keys* in one lock hold; missing keys map to ``None``."""
        async with self._lock:
            return self._inner.get_many(keys)
@@ -0,0 +1,144 @@
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Mapping, Sequence
4
+ from typing import Self, TypeVar
5
+
6
+ from pydantic import BaseModel, JsonValue, TypeAdapter
7
+
8
+ from async_redis_client.errors import CacheKeyNotFoundError, SerializationError
9
+
10
+ TModel = TypeVar("TModel")
11
+
12
+
13
+ class MemoryCacheSyncAdapter:
14
+ """
15
+ Minimal in-memory implementation of :class:`~async_redis_client.ports.sync_cache_port.CacheSyncPort`.
16
+
17
+ Intended for domain/unit tests without Redis. Mutually exclusive storage per key:
18
+ JSON tree, ``BaseModel``, or integer counter (same collision rules as documented for Redis).
19
+ TTL is not simulated (``ttl_seconds`` is accepted and ignored).
20
+
21
+ Physical keys mirror Redis adapters: ``namespace + key_prefix +`` logical key passed to port methods.
22
+ """
23
+
24
+ __slots__ = ("_counters", "_json", "_models", "_namespace", "_key_prefix")
25
+
26
+ def __init__(
27
+ self,
28
+ *,
29
+ namespace: str = "",
30
+ key_prefix: str = "",
31
+ ) -> None:
32
+ self._json: dict[str, JsonValue] = {}
33
+ self._models: dict[str, BaseModel] = {}
34
+ self._counters: dict[str, int] = {}
35
+ self._namespace = namespace
36
+ self._key_prefix = key_prefix
37
+
38
+ def _full_key(self, logical_key: str) -> str:
39
+ return f"{self._namespace}{self._key_prefix}{logical_key}"
40
+
41
+ def close(self) -> None:
42
+ """No-op; satisfies :class:`~async_redis_client.ports.sync_cache_port.CacheSyncPort`."""
43
+
44
+ def __enter__(self) -> Self:
45
+ return self
46
+
47
+ def __exit__(self, exc_type: object, exc: object, tb: object) -> None:
48
+ self.close()
49
+
50
+ def _clear_non_counter(self, physical_key: str) -> None:
51
+ self._json.pop(physical_key, None)
52
+ self._models.pop(physical_key, None)
53
+
54
+ def get(self, key: str) -> JsonValue | None:
55
+ fk = self._full_key(key)
56
+ if fk in self._models:
57
+ raise SerializationError("Key holds a model instance; use get_as_model().")
58
+ if fk in self._counters:
59
+ raise SerializationError("Key holds a counter; use incr/decr APIs.")
60
+ return self._json.get(fk)
61
+
62
+ def set(self, key: str, value: JsonValue, ttl_seconds: int | None = None) -> None:
63
+ del ttl_seconds
64
+ fk = self._full_key(key)
65
+ self._clear_non_counter(fk)
66
+ self._json[fk] = value
67
+
68
+ def set_json(
69
+ self, key: str, value: JsonValue, ttl_seconds: int | None = None
70
+ ) -> None:
71
+ self.set(key, value, ttl_seconds=ttl_seconds)
72
+
73
+ def get_json(self, key: str) -> JsonValue | None:
74
+ return self.get(key)
75
+
76
+ def get_or_raise_if_missing(self, key: str) -> JsonValue:
77
+ fk = self._full_key(key)
78
+ if fk in self._models:
79
+ raise SerializationError("Key holds a model instance; use get_as_model().")
80
+ if fk in self._counters:
81
+ raise SerializationError("Key holds a counter; use incr/decr APIs.")
82
+ if fk not in self._json:
83
+ raise CacheKeyNotFoundError(key)
84
+ return self._json[fk]
85
+
86
+ def get_json_or_raise_if_missing(self, key: str) -> JsonValue:
87
+ return self.get_or_raise_if_missing(key)
88
+
89
+ def set_model(
90
+ self, key: str, model: BaseModel, ttl_seconds: int | None = None
91
+ ) -> None:
92
+ del ttl_seconds
93
+ fk = self._full_key(key)
94
+ self._clear_non_counter(fk)
95
+ self._models[fk] = model
96
+ self._json.pop(fk, None)
97
+
98
+ def get_as_model(self, key: str, model_type: type[TModel]) -> TModel | None:
99
+ fk = self._full_key(key)
100
+ if fk in self._json:
101
+ raw = TypeAdapter(JsonValue).dump_json(self._json[fk])
102
+ return TypeAdapter(model_type).validate_json(raw)
103
+ if fk in self._models:
104
+ m = self._models[fk]
105
+ if isinstance(m, model_type):
106
+ return m
107
+ return TypeAdapter(model_type).validate_python(m.model_dump())
108
+ if fk in self._counters:
109
+ raise SerializationError("Key holds a counter; cannot load as model.")
110
+ return None
111
+
112
+ def delete(self, key: str) -> int:
113
+ fk = self._full_key(key)
114
+ existed = fk in self._json or fk in self._models or fk in self._counters
115
+ self._json.pop(fk, None)
116
+ self._models.pop(fk, None)
117
+ self._counters.pop(fk, None)
118
+ return 1 if existed else 0
119
+
120
+ def exists(self, key: str) -> bool:
121
+ fk = self._full_key(key)
122
+ return fk in self._json or fk in self._models or fk in self._counters
123
+
124
+ def incr(self, key: str, amount: int = 1) -> int:
125
+ fk = self._full_key(key)
126
+ self._clear_non_counter(fk)
127
+ cur = self._counters.get(fk, 0) + amount
128
+ self._counters[fk] = cur
129
+ return cur
130
+
131
+ def decr(self, key: str, amount: int = 1) -> int:
132
+ return self.incr(key, -amount)
133
+
134
+ def incrby(self, key: str, amount: int) -> int:
135
+ return self.incr(key, amount)
136
+
137
+ def set_many(
138
+ self, mapping: Mapping[str, JsonValue], ttl_seconds: int | None = None
139
+ ) -> None:
140
+ for k, v in mapping.items():
141
+ self.set(k, v, ttl_seconds=ttl_seconds)
142
+
143
+ def get_many(self, keys: Sequence[str]) -> dict[str, JsonValue | None]:
144
+ return {k: self.get(k) for k in keys}
@@ -0,0 +1,4 @@
1
"""Public re-exports for the Redis-backed cache adapters."""

from async_redis_client.adapters.redis.async_adapter import RedisCacheAsyncAdapter
from async_redis_client.adapters.redis.sync_adapter import RedisCacheSyncAdapter

__all__ = ["RedisCacheAsyncAdapter", "RedisCacheSyncAdapter"]
@@ -0,0 +1,78 @@
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Mapping, Sequence
4
+ from typing import Any
5
+
6
+ from cryptography.fernet import Fernet
7
+ from pydantic import JsonValue
8
+
9
+ from async_redis_client.crypto import decrypt_bytes, encrypt_bytes
10
+ from async_redis_client.serialization import dump_json_value, load_json_value
11
+
12
+
13
def full_key(namespace: str, key_prefix: str, logical_key: str) -> str:
    """Physical Redis key: ``namespace + key_prefix + logical_key``."""
    parts = (namespace, key_prefix, logical_key)
    return "".join(parts)
16
+
17
+
18
def redis_raw_to_bytes(raw: object) -> bytes:
    """Normalize a raw Redis reply value to ``bytes``.

    Handles ``bytes`` (the usual redis-py reply type), ``str`` (returned when
    the client was built with ``decode_responses=True`` — Fernet tokens are
    ASCII, so UTF-8 round-trips them losslessly), and buffer-like values such
    as ``bytearray``/``memoryview`` via the ``bytes()`` constructor. The
    original ``bytes(raw)`` fallback raised ``TypeError`` for ``str`` input.
    """
    if isinstance(raw, bytes):
        return raw
    if isinstance(raw, str):
        return raw.encode("utf-8")
    # bytearray, memoryview, and other buffer-protocol objects.
    return bytes(raw)
24
+
25
+
26
def encrypt_json_tree(fernet: Fernet, value: JsonValue) -> bytes:
    """Serialize *value* to JSON bytes, then encrypt the payload with *fernet*."""
    serialized = dump_json_value(value)
    return encrypt_bytes(fernet, serialized)
28
+
29
+
30
def decrypt_json_tree(
    fernet: Fernet,
    raw: bytes,
    *,
    secondary: Fernet | None,
) -> JsonValue:
    """Decrypt *raw* (passing *secondary* through as a fallback key) and parse the JSON payload."""
    plaintext = decrypt_bytes(fernet, raw, secondary=secondary)
    return load_json_value(plaintext)
38
+
39
+
40
def pipeline_enqueue_json_sets(
    pipe: Any,
    mapping: Mapping[str, JsonValue],
    *,
    namespace: str,
    key_prefix: str,
    fernet: Fernet,
    ttl_seconds: int | None,
) -> None:
    """Queue one encrypted SET per *mapping* entry on *pipe*.

    Each value is encrypted with *fernet* and written under its physical key;
    ``ex=ttl_seconds`` is attached only when a TTL was given. The pipeline is
    not executed here — the caller runs it.
    """
    for logical_key, payload in mapping.items():
        physical = full_key(namespace, key_prefix, logical_key)
        token = encrypt_json_tree(fernet, payload)
        if ttl_seconds is None:
            pipe.set(physical, token)
        else:
            pipe.set(physical, token, ex=ttl_seconds)
56
+
57
+
58
def redis_keys_from_logical(
    namespace: str, key_prefix: str, keys: Sequence[str]
) -> list[str]:
    """Map logical cache keys to physical Redis keys, preserving order."""
    return [full_key(namespace, key_prefix, logical_key) for logical_key in keys]
62
+
63
+
64
def decode_mget_json_rows(
    keys: Sequence[str],
    values: Sequence[object | None],
    *,
    fernet: Fernet,
    secondary: Fernet | None,
) -> dict[str, JsonValue | None]:
    """Pair each logical key with its decrypted MGET reply.

    ``values[i]`` is the raw Redis reply for ``keys[i]``; ``None`` rows map to
    ``None`` (cache miss), everything else is decrypted and JSON-decoded.

    Raises:
        ValueError: when *keys* and *values* differ in length. MGET returns
            exactly one reply per requested key, so a mismatch is a caller
            bug; the previous ``strict=False`` silently dropped the extras.
    """
    out: dict[str, JsonValue | None] = {}
    for logical, raw in zip(keys, values, strict=True):
        if raw is None:
            out[logical] = None
            continue
        payload = redis_raw_to_bytes(raw)
        out[logical] = decrypt_json_tree(fernet, payload, secondary=secondary)
    return out
@@ -0,0 +1,247 @@
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Mapping, Sequence
4
+ from typing import Self, TypeVar
5
+
6
+ from pydantic import BaseModel, JsonValue
7
+ from redis.asyncio import Redis as AsyncRedis
8
+ from redis.asyncio.cluster import RedisCluster as AsyncRedisCluster
9
+
10
+ from async_redis_client.adapters.redis._helpers import (
11
+ decode_mget_json_rows,
12
+ decrypt_json_tree,
13
+ encrypt_json_tree,
14
+ full_key,
15
+ pipeline_enqueue_json_sets,
16
+ redis_keys_from_logical,
17
+ redis_raw_to_bytes,
18
+ )
19
+ from async_redis_client.crypto import (
20
+ build_fernet,
21
+ build_secondary_fernet,
22
+ decrypt_bytes,
23
+ encrypt_bytes,
24
+ )
25
+ from async_redis_client.errors import CacheClosedError, CacheKeyNotFoundError
26
+ from async_redis_client.serialization import dump_model, load_as_type
27
+
28
+ RedisAsyncClient = AsyncRedis | AsyncRedisCluster
29
+
30
+ TModel = TypeVar("TModel")
31
+
32
+
33
class RedisCacheAsyncAdapter:
    """
    Async Redis cache implementing :class:`~async_redis_client.ports.async_cache_port.CacheAsyncPort`.

    Semantics match :class:`~async_redis_client.adapters.redis.sync_adapter.RedisCacheSyncAdapter`:
    encrypted JSON/model payloads vs plaintext integer counters; Cluster hash-slot constraints
    apply to :meth:`set_many` / :meth:`get_many`. Secondary Fernet key behaves as in the sync adapter.

    **Lifecycle:** URL factories set ``owns_client=True``; use ``async with adapter:`` or await
    :meth:`close`. Injected clients default to ``owns_client=False`` (you close the Redis client),
    unless you pass ``owns_client=True``. :meth:`aclose` is an alias of :meth:`close`. After an
    owned shutdown, operations raise :exc:`~async_redis_client.errors.CacheClosedError`.
    """

    __slots__ = (
        "_client",
        "_fernet",
        "_fernet_secondary",
        "_namespace",
        "_key_prefix",
        "_owns_client",
        "_closed",
    )

    def __init__(
        self,
        client: RedisAsyncClient,
        *,
        fernet_key: bytes | None = None,
        fernet_key_secondary: bytes | None = None,
        namespace: str = "",
        key_prefix: str = "",
        owns_client: bool = False,
    ) -> None:
        """Wrap an existing async Redis client.

        ``fernet_key``/``fernet_key_secondary`` are forwarded to the crypto
        helpers; the secondary Fernet (if any) is passed as the ``secondary``
        argument to every decrypt call. Pass ``owns_client=True`` when the
        adapter should close *client* on shutdown.
        """
        self._client = client
        self._fernet = build_fernet(fernet_key=fernet_key)
        self._fernet_secondary = build_secondary_fernet(fernet_key=fernet_key_secondary)
        self._namespace = namespace
        self._key_prefix = key_prefix
        self._owns_client = owns_client
        self._closed = False

    @classmethod
    def from_standalone_url(
        cls,
        url: str,
        *,
        fernet_key: bytes | None = None,
        fernet_key_secondary: bytes | None = None,
        namespace: str = "",
        key_prefix: str = "",
        **kwargs: object,
    ) -> RedisCacheAsyncAdapter:
        """Build an adapter around a standalone client from *url*; the adapter owns it."""
        client = AsyncRedis.from_url(url, **kwargs)  # type: ignore[arg-type]
        return cls(
            client,
            fernet_key=fernet_key,
            fernet_key_secondary=fernet_key_secondary,
            namespace=namespace,
            key_prefix=key_prefix,
            owns_client=True,
        )

    @classmethod
    def from_cluster_url(
        cls,
        url: str,
        *,
        fernet_key: bytes | None = None,
        fernet_key_secondary: bytes | None = None,
        namespace: str = "",
        key_prefix: str = "",
        **kwargs: object,
    ) -> RedisCacheAsyncAdapter:
        """Build an adapter around a Cluster client from *url*; the adapter owns it."""
        client = AsyncRedisCluster.from_url(url, **kwargs)  # type: ignore[arg-type]
        return cls(
            client,
            fernet_key=fernet_key,
            fernet_key_secondary=fernet_key_secondary,
            namespace=namespace,
            key_prefix=key_prefix,
            owns_client=True,
        )

    async def close(self) -> None:
        """Close the adapter; the Redis client is shut down only when owned.

        Idempotent. NOTE(review): ``_closed`` is set even for injected
        (non-owned) clients, so every operation raises ``CacheClosedError``
        after any ``close()`` — slightly stricter than the class docstring's
        "owned shutdown" wording suggests.
        """
        if self._closed:
            return
        if self._owns_client:
            await self._client.aclose()
            self._owns_client = False
        self._closed = True

    # Alias so the adapter also answers to the common async ``aclose`` name.
    aclose = close

    async def __aenter__(self) -> Self:
        # Refuse to re-enter a closed adapter instead of failing on first use.
        self._require_open()
        return self

    async def __aexit__(self, exc_type: object, exc: object, tb: object) -> None:
        await self.close()

    def _require_open(self) -> None:
        """Raise :exc:`CacheClosedError` if :meth:`close` has already run."""
        if self._closed:
            raise CacheClosedError("Redis cache adapter is closed")

    def _full_key(self, key: str) -> str:
        """Physical key: ``namespace + key_prefix + key``."""
        return full_key(self._namespace, self._key_prefix, key)

    async def get(self, key: str) -> JsonValue | None:
        """Return the decrypted JSON tree stored under *key*, or ``None`` on a miss."""
        self._require_open()
        raw = await self._client.get(self._full_key(key))
        if raw is None:
            return None
        data = redis_raw_to_bytes(raw)
        return decrypt_json_tree(self._fernet, data, secondary=self._fernet_secondary)

    async def set(
        self, key: str, value: JsonValue, ttl_seconds: int | None = None
    ) -> None:
        """Encrypt *value* as JSON and SET it, with ``ex=ttl_seconds`` when given."""
        self._require_open()
        fk = self._full_key(key)
        blob = encrypt_json_tree(self._fernet, value)
        if ttl_seconds is not None:
            await self._client.set(fk, blob, ex=ttl_seconds)
        else:
            await self._client.set(fk, blob)

    async def set_json(
        self, key: str, value: JsonValue, ttl_seconds: int | None = None
    ) -> None:
        """Alias of :meth:`set`."""
        await self.set(key, value, ttl_seconds=ttl_seconds)

    async def get_json(self, key: str) -> JsonValue | None:
        """Alias of :meth:`get`."""
        return await self.get(key)

    async def get_or_raise_if_missing(self, key: str) -> JsonValue:
        """Like :meth:`get`, but raise :exc:`CacheKeyNotFoundError` on a miss."""
        self._require_open()
        raw = await self._client.get(self._full_key(key))
        if raw is None:
            raise CacheKeyNotFoundError(key)
        data = redis_raw_to_bytes(raw)
        return decrypt_json_tree(self._fernet, data, secondary=self._fernet_secondary)

    async def get_json_or_raise_if_missing(self, key: str) -> JsonValue:
        """Alias of :meth:`get_or_raise_if_missing`."""
        return await self.get_or_raise_if_missing(key)

    async def set_model(
        self, key: str, model: BaseModel, ttl_seconds: int | None = None
    ) -> None:
        """Serialize *model* via ``dump_model``, encrypt, and SET it (optionally with TTL)."""
        self._require_open()
        fk = self._full_key(key)
        blob = encrypt_bytes(self._fernet, dump_model(model))
        if ttl_seconds is not None:
            await self._client.set(fk, blob, ex=ttl_seconds)
        else:
            await self._client.set(fk, blob)

    async def get_as_model(self, key: str, model_type: type[TModel]) -> TModel | None:
        """Decrypt the value under *key* and load it as *model_type* (``None`` on miss)."""
        self._require_open()
        raw = await self._client.get(self._full_key(key))
        if raw is None:
            return None
        data = redis_raw_to_bytes(raw)
        plain = decrypt_bytes(self._fernet, data, secondary=self._fernet_secondary)
        return load_as_type(model_type, plain)

    async def delete(self, key: str) -> int:
        """DEL the key; returns the number of keys removed (0 or 1)."""
        self._require_open()
        return int(await self._client.delete(self._full_key(key)))

    async def exists(self, key: str) -> bool:
        """True when the physical key exists in Redis."""
        self._require_open()
        return bool(await self._client.exists(self._full_key(key)))

    async def incr(self, key: str, amount: int = 1) -> int:
        """INCRBY the counter; counters are plain integers, not encrypted blobs."""
        self._require_open()
        return int(await self._client.incrby(self._full_key(key), amount))

    async def decr(self, key: str, amount: int = 1) -> int:
        """DECRBY the counter; returns the new value."""
        self._require_open()
        return int(await self._client.decrby(self._full_key(key), amount))

    async def incrby(self, key: str, amount: int) -> int:
        """Explicit-amount INCRBY; same server command as :meth:`incr`."""
        self._require_open()
        return int(await self._client.incrby(self._full_key(key), amount))

    async def set_many(
        self, mapping: Mapping[str, JsonValue], ttl_seconds: int | None = None
    ) -> None:
        """Encrypt and SET every *mapping* entry through one pipeline round trip.

        The pipeline is created with ``transaction=False``, so the commands
        are batched but not atomic. Cluster hash-slot constraints apply (see
        class docstring).
        """
        self._require_open()
        if not mapping:
            return
        pipe = self._client.pipeline(transaction=False)
        pipeline_enqueue_json_sets(
            pipe,
            mapping,
            namespace=self._namespace,
            key_prefix=self._key_prefix,
            fernet=self._fernet,
            ttl_seconds=ttl_seconds,
        )
        await pipe.execute()

    async def get_many(self, keys: Sequence[str]) -> dict[str, JsonValue | None]:
        """MGET all *keys* and decrypt each reply; missing keys map to ``None``."""
        self._require_open()
        if not keys:
            return {}
        rkeys = redis_keys_from_logical(self._namespace, self._key_prefix, keys)
        values = await self._client.mget(rkeys)
        return decode_mget_json_rows(
            keys,
            values,
            fernet=self._fernet,
            secondary=self._fernet_secondary,
        )