dycw-utilities 0.129.10__py3-none-any.whl → 0.175.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. dycw_utilities-0.175.17.dist-info/METADATA +34 -0
  2. dycw_utilities-0.175.17.dist-info/RECORD +103 -0
  3. dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.175.17.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +14 -14
  7. utilities/asyncio.py +350 -819
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +77 -22
  10. utilities/cachetools.py +24 -29
  11. utilities/click.py +393 -237
  12. utilities/concurrent.py +8 -11
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +83 -118
  17. utilities/docker.py +293 -0
  18. utilities/enum.py +26 -23
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +29 -65
  21. utilities/fpdf2.py +3 -3
  22. utilities/functions.py +169 -416
  23. utilities/functools.py +18 -19
  24. utilities/git.py +9 -30
  25. utilities/grp.py +28 -0
  26. utilities/gzip.py +31 -0
  27. utilities/http.py +3 -2
  28. utilities/hypothesis.py +738 -589
  29. utilities/importlib.py +17 -1
  30. utilities/inflect.py +25 -0
  31. utilities/iterables.py +194 -262
  32. utilities/jinja2.py +148 -0
  33. utilities/json.py +70 -0
  34. utilities/libcst.py +38 -17
  35. utilities/lightweight_charts.py +5 -9
  36. utilities/logging.py +345 -543
  37. utilities/math.py +18 -13
  38. utilities/memory_profiler.py +11 -15
  39. utilities/more_itertools.py +200 -131
  40. utilities/operator.py +33 -29
  41. utilities/optuna.py +6 -6
  42. utilities/orjson.py +272 -137
  43. utilities/os.py +61 -4
  44. utilities/parse.py +59 -61
  45. utilities/pathlib.py +281 -40
  46. utilities/permissions.py +298 -0
  47. utilities/pickle.py +2 -2
  48. utilities/platform.py +24 -5
  49. utilities/polars.py +1214 -430
  50. utilities/polars_ols.py +1 -1
  51. utilities/postgres.py +408 -0
  52. utilities/pottery.py +113 -26
  53. utilities/pqdm.py +10 -11
  54. utilities/psutil.py +6 -57
  55. utilities/pwd.py +28 -0
  56. utilities/pydantic.py +4 -54
  57. utilities/pydantic_settings.py +240 -0
  58. utilities/pydantic_settings_sops.py +76 -0
  59. utilities/pyinstrument.py +8 -10
  60. utilities/pytest.py +227 -121
  61. utilities/pytest_plugins/__init__.py +1 -0
  62. utilities/pytest_plugins/pytest_randomly.py +23 -0
  63. utilities/pytest_plugins/pytest_regressions.py +56 -0
  64. utilities/pytest_regressions.py +26 -46
  65. utilities/random.py +13 -9
  66. utilities/re.py +58 -28
  67. utilities/redis.py +401 -550
  68. utilities/scipy.py +1 -1
  69. utilities/sentinel.py +10 -0
  70. utilities/shelve.py +4 -1
  71. utilities/shutil.py +25 -0
  72. utilities/slack_sdk.py +36 -106
  73. utilities/sqlalchemy.py +502 -473
  74. utilities/sqlalchemy_polars.py +38 -94
  75. utilities/string.py +2 -3
  76. utilities/subprocess.py +1572 -0
  77. utilities/tempfile.py +86 -4
  78. utilities/testbook.py +50 -0
  79. utilities/text.py +165 -42
  80. utilities/timer.py +37 -65
  81. utilities/traceback.py +158 -929
  82. utilities/types.py +146 -116
  83. utilities/typing.py +531 -71
  84. utilities/tzdata.py +1 -53
  85. utilities/tzlocal.py +6 -23
  86. utilities/uuid.py +43 -5
  87. utilities/version.py +27 -26
  88. utilities/whenever.py +1776 -386
  89. utilities/zoneinfo.py +84 -22
  90. dycw_utilities-0.129.10.dist-info/METADATA +0 -241
  91. dycw_utilities-0.129.10.dist-info/RECORD +0 -96
  92. dycw_utilities-0.129.10.dist-info/WHEEL +0 -4
  93. dycw_utilities-0.129.10.dist-info/licenses/LICENSE +0 -21
  94. utilities/datetime.py +0 -1409
  95. utilities/eventkit.py +0 -402
  96. utilities/loguru.py +0 -144
  97. utilities/luigi.py +0 -228
  98. utilities/period.py +0 -324
  99. utilities/pyrsistent.py +0 -89
  100. utilities/python_dotenv.py +0 -105
  101. utilities/streamlit.py +0 -105
  102. utilities/sys.py +0 -87
  103. utilities/tenacity.py +0 -145
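Of the files above, only the utilities/redis.py diff is reproduced below. Its headline changes: whenever-based `Delta` timeouts/TTLs replace the old datetime-based `Duration` values (`timeout_dur` → `timeout_td`), PEP 695 type-parameter syntax replaces the module-level `TypeVar`s, a new `publish_many` helper fans out over `asyncio.TaskGroup`, and the `PublishService`/`SubscribeService` looper classes are removed. As a rough orientation only — a minimal sketch assuming a Redis server on localhost:6379, that `SECOND` multiplies like a whenever time delta, and purely illustrative names such as "scores" and "events" — the updated API could be exercised like this:

import asyncio

from redis.asyncio import Redis

from utilities.redis import publish_many, redis_hash_map_key
from utilities.whenever import SECOND


async def main() -> None:
    # Hypothetical hashmap key: name "scores", str keys, int values; the
    # one-second timeout and ten-second TTL are whenever-based deltas (assumption).
    scores = redis_hash_map_key("scores", str, int, timeout=SECOND, ttl=10 * SECOND)
    async with Redis(host="localhost", port=6379) as redis:
        _ = await scores.set(redis, "alice", 1)
        print(await scores.get(redis, "alice"))  # -> 1
        # publish_many runs each publish in a TaskGroup and returns one success
        # flag per message (False if an individual publish hits the timeout).
        flags = await publish_many(redis, "events", ["started", "finished"])
        print(flags)  # e.g. [True, True]


asyncio.run(main())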
utilities/redis.py CHANGED
@@ -1,21 +1,17 @@
 from __future__ import annotations
 
-import asyncio
-from asyncio import CancelledError, Event, Queue, Task, create_task
-from collections.abc import AsyncIterator, Callable, Mapping
-from contextlib import asynccontextmanager, suppress
-from dataclasses import dataclass, field
+from asyncio import CancelledError, Queue, Task, TaskGroup, create_task
+from collections.abc import AsyncIterator, Callable, Mapping, Sequence
+from contextlib import suppress
+from dataclasses import dataclass
 from functools import partial
 from operator import itemgetter
 from typing import (
     TYPE_CHECKING,
     Any,
-    Generic,
     Literal,
-    Self,
     TypedDict,
     TypeGuard,
-    TypeVar,
     assert_never,
     cast,
     overload,
@@ -24,107 +20,79 @@ from typing import (
 
 from redis.asyncio import Redis
 
-from utilities.asyncio import EnhancedQueue, Looper, timeout_dur
-from utilities.contextlib import suppress_super_object_attribute_error
-from utilities.datetime import (
-    MILLISECOND,
-    SECOND,
-    datetime_duration_to_float,
-    datetime_duration_to_timedelta,
-)
+from utilities.asyncio import sleep_td, timeout_td
+from utilities.contextlib import enhanced_async_context_manager
 from utilities.errors import ImpossibleCaseError
 from utilities.functions import ensure_int, identity
 from utilities.iterables import always_iterable, one
-from utilities.orjson import deserialize, serialize
+from utilities.os import is_pytest
+from utilities.typing import is_instance_gen
+from utilities.whenever import MILLISECOND, SECOND, to_milliseconds, to_nanoseconds
 
 if TYPE_CHECKING:
-    from collections.abc import (
-        AsyncIterator,
-        Awaitable,
-        Collection,
-        Iterable,
-        Iterator,
-        Sequence,
-    )
-    from types import TracebackType
+    from collections.abc import AsyncIterator, Awaitable, Collection, Iterable
 
     from redis.asyncio import ConnectionPool
     from redis.asyncio.client import PubSub
-    from redis.typing import EncodableT, ResponseT
+    from redis.typing import EncodableT
 
     from utilities.iterables import MaybeIterable
-    from utilities.types import Duration, TypeLike
-
+    from utilities.types import Delta, MaybeSequence, MaybeType, TypeLike
 
-_K = TypeVar("_K")
-_K1 = TypeVar("_K1")
-_K2 = TypeVar("_K2")
-_K3 = TypeVar("_K3")
-_T = TypeVar("_T")
-_T1 = TypeVar("_T1")
-_T2 = TypeVar("_T2")
-_T3 = TypeVar("_T3")
-_T4 = TypeVar("_T4")
-_T5 = TypeVar("_T5")
-_V = TypeVar("_V")
-_V1 = TypeVar("_V1")
-_V2 = TypeVar("_V2")
-_V3 = TypeVar("_V3")
 
-
-_PUBLISH_TIMEOUT: Duration = SECOND
+_PUBLISH_TIMEOUT: Delta = SECOND
 
 
 ##
 
 
 @dataclass(kw_only=True)
-class RedisHashMapKey(Generic[_K, _V]):
+class RedisHashMapKey[K, V]:
     """A hashmap key in a redis store."""
 
     name: str
-    key: TypeLike[_K]
-    key_serializer: Callable[[_K], bytes] | None = None
-    key_deserializer: Callable[[bytes], _K] | None = None
-    value: TypeLike[_V]
-    value_serializer: Callable[[_V], bytes] | None = None
-    value_deserializer: Callable[[bytes], _V] | None = None
-    timeout: Duration | None = None
-    error: type[Exception] = TimeoutError
-    ttl: Duration | None = None
-
-    async def delete(self, redis: Redis, key: _K, /) -> int:
+    key: TypeLike[K]
+    key_serializer: Callable[[K], bytes] | None = None
+    key_deserializer: Callable[[bytes], K] | None = None
+    value: TypeLike[V]
+    value_serializer: Callable[[V], bytes] | None = None
+    value_deserializer: Callable[[bytes], V] | None = None
+    timeout: Delta | None = None
+    error: MaybeType[BaseException] = TimeoutError
+    ttl: Delta | None = None
+
+    async def delete(self, redis: Redis, key: K, /) -> int:
         """Delete a key from a hashmap in `redis`."""
         ser = _serialize( # skipif-ci-and-not-linux
            key, serializer=self.key_serializer
        ).decode()
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            return await cast("Awaitable[int]", redis.hdel(self.name, ser))
        raise ImpossibleCaseError(case=[f"{redis=}", f"{key=}"]) # pragma: no cover
 
-    async def exists(self, redis: Redis, key: _K, /) -> bool:
+    async def exists(self, redis: Redis, key: K, /) -> bool:
         """Check if the key exists in a hashmap in `redis`."""
         ser = _serialize( # skipif-ci-and-not-linux
            key, serializer=self.key_serializer
        ).decode()
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            return await cast("Awaitable[bool]", redis.hexists(self.name, ser))
 
-    async def get(self, redis: Redis, key: _K, /) -> _V:
+    async def get(self, redis: Redis, key: K, /) -> V:
         """Get a value from a hashmap in `redis`."""
         result = one(await self.get_many(redis, [key])) # skipif-ci-and-not-linux
         if result is None: # skipif-ci-and-not-linux
             raise KeyError(self.name, key)
         return result # skipif-ci-and-not-linux
 
-    async def get_all(self, redis: Redis, /) -> Mapping[_K, _V]:
+    async def get_all(self, redis: Redis, /) -> Mapping[K, V]:
         """Get a value from a hashmap in `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = await cast( # skipif-ci-and-not-linux
                "Awaitable[Mapping[bytes, bytes]]", redis.hgetall(self.name)
@@ -136,9 +104,7 @@ class RedisHashMapKey(Generic[_K, _V]):
             for key, value in result.items()
         }
 
-    async def get_many(
-        self, redis: Redis, keys: Iterable[_K], /
-    ) -> Sequence[_V | None]:
+    async def get_many(self, redis: Redis, keys: Iterable[K], /) -> Sequence[V | None]:
         """Get multiple values from a hashmap in `redis`."""
         keys = list(keys) # skipif-ci-and-not-linux
         if len(keys) == 0: # skipif-ci-and-not-linux
@@ -146,8 +112,8 @@ class RedisHashMapKey(Generic[_K, _V]):
         ser = [ # skipif-ci-and-not-linux
             _serialize(key, serializer=self.key_serializer) for key in keys
         ]
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = await cast( # skipif-ci-and-not-linux
                "Awaitable[Sequence[bytes | None]]", redis.hmget(self.name, ser)
@@ -159,10 +125,10 @@ class RedisHashMapKey(Generic[_K, _V]):
             for data in result
         ]
 
-    async def keys(self, redis: Redis, /) -> Sequence[_K]:
+    async def keys(self, redis: Redis, /) -> Sequence[K]:
         """Get the keys of a hashmap in `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = await cast("Awaitable[Sequence[bytes]]", redis.hkeys(self.name))
        return [ # skipif-ci-and-not-linux
@@ -171,16 +137,16 @@ class RedisHashMapKey(Generic[_K, _V]):
 
     async def length(self, redis: Redis, /) -> int:
         """Get the length of a hashmap in `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            return await cast("Awaitable[int]", redis.hlen(self.name))
 
-    async def set(self, redis: Redis, key: _K, value: _V, /) -> int:
+    async def set(self, redis: Redis, key: K, value: V, /) -> int:
         """Set a value in a hashmap in `redis`."""
         return await self.set_many(redis, {key: value}) # skipif-ci-and-not-linux
 
-    async def set_many(self, redis: Redis, mapping: Mapping[_K, _V], /) -> int:
+    async def set_many(self, redis: Redis, mapping: Mapping[K, V], /) -> int:
         """Set multiple value(s) in a hashmap in `redis`."""
         if len(mapping) == 0: # skipif-ci-and-not-linux
             return 0
@@ -190,20 +156,20 @@ class RedisHashMapKey(Generic[_K, _V]):
             )
             for key, value in mapping.items()
         }
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = await cast(
                "Awaitable[int]", redis.hset(self.name, mapping=cast("Any", ser))
            )
            if self.ttl is not None:
-                await redis.pexpire(self.name, datetime_duration_to_timedelta(self.ttl))
+                await redis.pexpire(self.name, to_milliseconds(self.ttl))
         return result # skipif-ci-and-not-linux
 
-    async def values(self, redis: Redis, /) -> Sequence[_V]:
+    async def values(self, redis: Redis, /) -> Sequence[V]:
         """Get the values of a hashmap in `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = await cast("Awaitable[Sequence[bytes]]", redis.hvals(self.name))
        return [ # skipif-ci-and-not-linux
@@ -212,169 +178,169 @@ class RedisHashMapKey(Generic[_K, _V]):
 
 
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K, V](
     name: str,
-    key: type[_K],
-    value: type[_V],
+    key: type[K],
+    value: type[V],
     /,
     *,
-    key_serializer: Callable[[_K], bytes] | None = None,
+    key_serializer: Callable[[K], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K, _V]: ...
+    value_serializer: Callable[[V], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K, V]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K, V1, V2](
     name: str,
-    key: type[_K],
-    value: tuple[type[_V1], type[_V2]],
+    key: type[K],
+    value: tuple[type[V1], type[V2]],
     /,
     *,
-    key_serializer: Callable[[_K], bytes] | None = None,
+    key_serializer: Callable[[K], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K, _V1 | _V2]: ...
+    value_serializer: Callable[[V1 | V2], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K, V1 | V2]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K, V1, V2, V3](
     name: str,
-    key: type[_K],
-    value: tuple[type[_V1], type[_V2], type[_V3]],
+    key: type[K],
+    value: tuple[type[V1], type[V2], type[V3]],
     /,
     *,
-    key_serializer: Callable[[_K], bytes] | None = None,
+    key_serializer: Callable[[K], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2 | _V3], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2 | _V3] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K, _V1 | _V2 | _V3]: ...
+    value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K, V1 | V2 | V3]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, V](
     name: str,
-    key: tuple[type[_K1], type[_K2]],
-    value: type[_V],
+    key: tuple[type[K1], type[K2]],
+    value: type[V],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2, _V]: ...
+    value_serializer: Callable[[V], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2, V]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, V1, V2](
     name: str,
-    key: tuple[type[_K1], type[_K2]],
-    value: tuple[type[_V1], type[_V2]],
+    key: tuple[type[K1], type[K2]],
+    value: tuple[type[V1], type[V2]],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2, _V1 | _V2]: ...
+    value_serializer: Callable[[V1 | V2], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2, V1 | V2]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, V1, V2, V3](
     name: str,
-    key: tuple[type[_K1], type[_K2]],
-    value: tuple[type[_V1], type[_V2], type[_V3]],
+    key: tuple[type[K1], type[K2]],
+    value: tuple[type[V1], type[V2], type[V3]],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2 | _V3], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2 | _V3] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2, _V1 | _V2 | _V3]: ...
+    value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2, V1 | V2 | V3]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, K3, V](
     name: str,
-    key: tuple[type[_K1], type[_K2], type[_K3]],
-    value: type[_V],
+    key: tuple[type[K1], type[K2], type[K3]],
+    value: type[V],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2 | _K3], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2 | K3], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2 | _K3, _V]: ...
+    value_serializer: Callable[[V], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2 | K3, V]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, K3, V1, V2](
     name: str,
-    key: tuple[type[_K1], type[_K2], type[_K3]],
-    value: tuple[type[_V1], type[_V2]],
+    key: tuple[type[K1], type[K2], type[K3]],
+    value: tuple[type[V1], type[V2]],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2 | _K3], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2 | K3], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2 | _K3, _V1 | _V2]: ...
+    value_serializer: Callable[[V1 | V2], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2 | K3, V1 | V2]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K1, K2, K3, V1, V2, V3](
     name: str,
-    key: tuple[type[_K1], type[_K2], type[_K3]],
-    value: tuple[type[_V1], type[_V2], type[_V3]],
+    key: tuple[type[K1], type[K2], type[K3]],
+    value: tuple[type[V1], type[V2], type[V3]],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2 | _K3], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2 | K3], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2 | _V3], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2 | _V3] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K1 | _K2 | _K3, _V1 | _V2 | _V3]: ...
+    value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K1 | K2 | K3, V1 | V2 | V3]: ...
 @overload
-def redis_hash_map_key(
+def redis_hash_map_key[K, K1, K2, K3, V, V1, V2, V3](
     name: str,
-    key: TypeLike[_K],
-    value: TypeLike[_V],
+    key: TypeLike[K],
+    value: TypeLike[V],
     /,
     *,
-    key_serializer: Callable[[_K1 | _K2 | _K3], bytes] | None = None,
+    key_serializer: Callable[[K1 | K2 | K3], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
-    value_serializer: Callable[[_V1 | _V2 | _V3], bytes] | None = None,
-    value_deserializer: Callable[[bytes], _V1 | _V2 | _V3] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisHashMapKey[_K, _V]: ...
-def redis_hash_map_key(
+    value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
+    value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisHashMapKey[K, V]: ...
+def redis_hash_map_key[K, V](
     name: str,
-    key: TypeLike[_K],
-    value: TypeLike[_V],
+    key: TypeLike[K],
+    value: TypeLike[V],
     /,
     *,
     key_serializer: Callable[[Any], bytes] | None = None,
     key_deserializer: Callable[[bytes], Any] | None = None,
     value_serializer: Callable[[Any], bytes] | None = None,
     value_deserializer: Callable[[bytes], Any] | None = None,
-    timeout: Duration | None = None,
-    ttl: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-) -> RedisHashMapKey[_K, _V]:
+    timeout: Delta | None = None,
+    ttl: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+) -> RedisHashMapKey[K, V]:
     """Create a redis key."""
     return RedisHashMapKey( # skipif-ci-and-not-linux
         name=name,
@@ -394,40 +360,41 @@ def redis_hash_map_key(
 
 
 @dataclass(kw_only=True)
-class RedisKey(Generic[_T]):
+class RedisKey[T]:
     """A key in a redis store."""
 
     name: str
-    type: TypeLike[_T]
-    serializer: Callable[[_T], bytes] | None = None
-    deserializer: Callable[[bytes], _T] | None = None
-    timeout: Duration | None = None
-    error: type[Exception] = TimeoutError
-    ttl: Duration | None = None
+    type: TypeLike[T]
+    serializer: Callable[[T], bytes] | None = None
+    deserializer: Callable[[bytes], T] | None = None
+    timeout: Delta | None = None
+    error: MaybeType[BaseException] = TimeoutError
+    ttl: Delta | None = None
 
     async def delete(self, redis: Redis, /) -> int:
         """Delete the key from `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
-            return ensure_int(await redis.delete(self.name))
+            response = await redis.delete(self.name)
+            return ensure_int(response)
 
     async def exists(self, redis: Redis, /) -> bool:
         """Check if the key exists in `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = cast("Literal[0, 1]", await redis.exists(self.name))
        match result: # skipif-ci-and-not-linux
            case 0 | 1 as value:
                return bool(value)
-            case _ as never:
+            case never:
                assert_never(never)
 
-    async def get(self, redis: Redis, /) -> _T:
+    async def get(self, redis: Redis, /) -> T:
         """Get a value from `redis`."""
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
            result = cast("bytes | None", await redis.get(self.name))
        if result is None: # skipif-ci-and-not-linux
@@ -436,106 +403,104 @@ class RedisKey(Generic[_T]):
             result, deserializer=self.deserializer
         )
 
-    async def set(self, redis: Redis, value: _T, /) -> int:
+    async def set(self, redis: Redis, value: T, /) -> int:
         """Set a value in `redis`."""
         ser = _serialize(value, serializer=self.serializer) # skipif-ci-and-not-linux
         ttl = ( # skipif-ci-and-not-linux
-            None
-            if self.ttl is None
-            else round(1000 * datetime_duration_to_float(self.ttl))
+            None if self.ttl is None else to_milliseconds(self.ttl)
        )
-        async with timeout_dur( # skipif-ci-and-not-linux
-            duration=self.timeout, error=self.error
+        async with timeout_td( # skipif-ci-and-not-linux
+            self.timeout, error=self.error
        ):
-            result = await redis.set( # skipif-ci-and-not-linux
+            response = await redis.set( # skipif-ci-and-not-linux
                self.name, ser, px=ttl
            )
-        return ensure_int(result) # skipif-ci-and-not-linux
+        return ensure_int(response) # skipif-ci-and-not-linux
 
 
 @overload
-def redis_key(
+def redis_key[T](
     name: str,
-    type_: type[_T],
+    type_: type[T],
     /,
     *,
-    serializer: Callable[[_T], bytes] | None = None,
-    deserializer: Callable[[bytes], _T] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T]: ...
+    serializer: Callable[[T], bytes] | None = None,
+    deserializer: Callable[[bytes], T] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T]: ...
 @overload
-def redis_key(
+def redis_key[T1, T2](
     name: str,
-    type_: tuple[type[_T1], type[_T2]],
+    type_: tuple[type[T1], type[T2]],
     /,
     *,
-    serializer: Callable[[_T1 | _T2], bytes] | None = None,
-    deserializer: Callable[[bytes], _T1 | _T2] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T1 | _T2]: ...
+    serializer: Callable[[T1 | T2], bytes] | None = None,
+    deserializer: Callable[[bytes], T1 | T2] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T1 | T2]: ...
 @overload
-def redis_key(
+def redis_key[T1, T2, T3](
     name: str,
-    type_: tuple[type[_T1], type[_T2], type[_T3]],
+    type_: tuple[type[T1], type[T2], type[T3]],
     /,
     *,
-    serializer: Callable[[_T1 | _T2 | _T3], bytes] | None = None,
-    deserializer: Callable[[bytes], _T1 | _T2 | _T3] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T1 | _T2 | _T3]: ...
+    serializer: Callable[[T1 | T2 | T3], bytes] | None = None,
+    deserializer: Callable[[bytes], T1 | T2 | T3] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T1 | T2 | T3]: ...
 @overload
-def redis_key(
+def redis_key[T1, T2, T3, T4](
     name: str,
-    type_: tuple[type[_T1], type[_T2], type[_T3], type[_T4]],
+    type_: tuple[type[T1], type[T2], type[T3], type[T4]],
     /,
     *,
-    serializer: Callable[[_T1 | _T2 | _T3 | _T4], bytes] | None = None,
-    deserializer: Callable[[bytes], _T1 | _T2 | _T3 | _T4] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T1 | _T2 | _T3 | _T4]: ...
+    serializer: Callable[[T1 | T2 | T3 | T4], bytes] | None = None,
+    deserializer: Callable[[bytes], T1 | T2 | T3 | T4] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T1 | T2 | T3 | T4]: ...
 @overload
-def redis_key(
+def redis_key[T1, T2, T3, T4, T5](
     name: str,
-    type_: tuple[type[_T1], type[_T2], type[_T3], type[_T4], type[_T5]],
+    type_: tuple[type[T1], type[T2], type[T3], type[T4], type[T5]],
     /,
     *,
-    serializer: Callable[[_T1 | _T2 | _T3 | _T4 | _T5], bytes] | None = None,
-    deserializer: Callable[[bytes], _T1 | _T2 | _T3 | _T4 | _T5] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T1 | _T2 | _T3 | _T4 | _T5]: ...
+    serializer: Callable[[T1 | T2 | T3 | T4 | T5], bytes] | None = None,
+    deserializer: Callable[[bytes], T1 | T2 | T3 | T4 | T5] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T1 | T2 | T3 | T4 | T5]: ...
 @overload
-def redis_key(
+def redis_key[T, T1, T2, T3, T4, T5](
     name: str,
-    type_: TypeLike[_T],
+    type_: TypeLike[T],
     /,
     *,
-    serializer: Callable[[_T1 | _T2 | _T3 | _T4 | _T5], bytes] | None = None,
-    deserializer: Callable[[bytes], _T1 | _T2 | _T3 | _T4 | _T5] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T]: ...
-def redis_key(
+    serializer: Callable[[T1 | T2 | T3 | T4 | T5], bytes] | None = None,
+    deserializer: Callable[[bytes], T1 | T2 | T3 | T4 | T5] | None = None,
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T]: ...
+def redis_key[T](
     name: str,
-    type_: TypeLike[_T],
+    type_: TypeLike[T],
     /,
     *,
     serializer: Callable[[Any], bytes] | None = None,
     deserializer: Callable[[bytes], Any] | None = None,
-    timeout: Duration | None = None,
-    error: type[Exception] = TimeoutError,
-    ttl: Duration | None = None,
-) -> RedisKey[_T]:
+    timeout: Delta | None = None,
+    error: MaybeType[BaseException] = TimeoutError,
+    ttl: Delta | None = None,
+) -> RedisKey[T]:
     """Create a redis key."""
     return RedisKey( # skipif-ci-and-not-linux
         name=name,
@@ -552,15 +517,15 @@ def redis_key(
 
 
 @overload
-async def publish(
+async def publish[T](
     redis: Redis,
     channel: str,
-    data: _T,
+    data: T,
     /,
     *,
-    serializer: Callable[[_T], EncodableT],
-    timeout: Duration = _PUBLISH_TIMEOUT,
-) -> ResponseT: ...
+    serializer: Callable[[T], EncodableT],
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> int: ...
 @overload
 async def publish(
     redis: Redis,
@@ -569,217 +534,192 @@ async def publish(
     /,
     *,
     serializer: None = None,
-    timeout: Duration = _PUBLISH_TIMEOUT,
-) -> ResponseT: ...
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> int: ...
 @overload
-async def publish(
+async def publish[T](
     redis: Redis,
     channel: str,
-    data: bytes | str | _T,
+    data: bytes | str | T,
     /,
     *,
-    serializer: Callable[[_T], EncodableT] | None = None,
-    timeout: Duration = _PUBLISH_TIMEOUT,
-) -> ResponseT: ...
-async def publish(
+    serializer: Callable[[T], EncodableT] | None = None,
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> int: ...
+async def publish[T](
     redis: Redis,
     channel: str,
-    data: bytes | str | _T,
+    data: bytes | str | T,
     /,
     *,
-    serializer: Callable[[_T], EncodableT] | None = None,
-    timeout: Duration = _PUBLISH_TIMEOUT,
-) -> ResponseT:
+    serializer: Callable[[T], EncodableT] | None = None,
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> int:
     """Publish an object to a channel."""
     match data, serializer: # skipif-ci-and-not-linux
         case bytes() | str() as data_use, _:
             ...
         case _, None:
-            raise PublishError(data=data, serializer=serializer)
+            raise PublishError(data=data)
         case _, Callable():
             data_use = serializer(data)
-        case _ as never:
+        case never:
             assert_never(never)
-    async with timeout_dur(duration=timeout): # skipif-ci-and-not-linux
-        return await redis.publish(channel, data_use) # skipif-ci-and-not-linux
+    async with timeout_td(timeout): # skipif-ci-and-not-linux
+        response = await redis.publish(channel, data_use) # skipif-ci-and-not-linux
+        return ensure_int(response) # skipif-ci-and-not-linux
 
 
 @dataclass(kw_only=True, slots=True)
 class PublishError(Exception):
     data: Any
-    serializer: Callable[[Any], EncodableT] | None = None
 
     @override
     def __str__(self) -> str:
-        return (
-            f"Unable to publish data {self.data!r} with serializer {self.serializer!r}"
-        )
+        return f"Unable to publish data {self.data!r} with no serializer"
 
 
 ##
 
 
-@dataclass(kw_only=True)
-class PublishService(Looper[tuple[str, _T]]):
-    """Service to publish items to Redis."""
-
-    # base
-    freq: Duration = field(default=MILLISECOND, repr=False)
-    backoff: Duration = field(default=SECOND, repr=False)
-    empty_upon_exit: bool = field(default=True, repr=False)
-    # self
-    redis: Redis
-    serializer: Callable[[_T], EncodableT] = serialize
-    publish_timeout: Duration = _PUBLISH_TIMEOUT
-
-    @override
-    async def core(self) -> None:
-        await super().core() # skipif-ci-and-not-linux
-        while not self.empty(): # skipif-ci-and-not-linux
-            channel, data = self.get_left_nowait()
-            _ = await publish(
-                self.redis,
-                channel,
-                data,
-                serializer=self.serializer,
-                timeout=self.publish_timeout,
+async def publish_many[T](
+    redis: Redis,
+    channel: str,
+    data: MaybeSequence[bytes | str | T],
+    /,
+    *,
+    serializer: Callable[[T], EncodableT] | None = None,
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> Sequence[bool]:
+    """Publish an object/multiple objects to a channel."""
+    async with TaskGroup() as tg:
+        tasks = [
+            tg.create_task(
+                _try_publish(
+                    redis,
+                    channel,
+                    d,
+                    serializer=cast("Callable[[Any], EncodableT]", serializer),
+                    timeout=timeout,
+                )
            )
+            for d in always_iterable(data)
+        ]
+    return [t.result() for t in tasks]
 
 
-##
-
-
-@dataclass(kw_only=True)
-class PublishServiceMixin(Generic[_T]):
-    """Mix-in for the publish service."""
-
-    # base - looper
-    publish_service_freq: Duration = field(default=MILLISECOND, repr=False)
-    publish_service_backoff: Duration = field(default=SECOND, repr=False)
-    publish_service_empty_upon_exit: bool = field(default=False, repr=False)
-    publish_service_logger: str | None = field(default=None, repr=False)
-    publish_service_timeout: Duration | None = field(default=None, repr=False)
-    publish_service_debug: bool = field(default=False, repr=False)
-    _is_pending_restart: Event = field(default_factory=Event, init=False, repr=False)
-    # base - publish service
-    publish_service_redis: Redis
-    publish_service_serializer: Callable[[_T], EncodableT] = serialize
-    publish_service_publish_timeout: Duration = _PUBLISH_TIMEOUT
-    # self
-    _publish_service: PublishService[_T] = field(init=False, repr=False)
-
-    def __post_init__(self) -> None:
-        with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
-            super().__post_init__() # pyright: ignore[reportAttributeAccessIssue]
-        self._publish_service = PublishService( # skipif-ci-and-not-linux
-            # looper
-            freq=self.publish_service_freq,
-            backoff=self.publish_service_backoff,
-            empty_upon_exit=self.publish_service_empty_upon_exit,
-            logger=self.publish_service_logger,
-            timeout=self.publish_service_timeout,
-            _debug=self.publish_service_debug,
-            # publish service
-            redis=self.publish_service_redis,
-            serializer=self.publish_service_serializer,
-            publish_timeout=self.publish_service_publish_timeout,
-        )
-
-    def _yield_sub_loopers(self) -> Iterator[Looper[Any]]:
-        with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
-            yield from super()._yield_sub_loopers() # pyright: ignore[reportAttributeAccessIssue]
-        yield self._publish_service # skipif-ci-and-not-linux
+async def _try_publish[T](
+    redis: Redis,
+    channel: str,
+    data: bytes | str | T,
+    /,
+    *,
+    serializer: Callable[[T], EncodableT] | None = None,
+    timeout: Delta | None = _PUBLISH_TIMEOUT,
+) -> bool:
+    try:
+        _ = await publish(redis, channel, data, serializer=serializer, timeout=timeout)
+    except TimeoutError:
+        return False
+    return True
 
 
 ##
 
 
-_SUBSCRIBE_TIMEOUT: Duration = SECOND
-_SUBSCRIBE_SLEEP: Duration = MILLISECOND
+_SUBSCRIBE_TIMEOUT: Delta = SECOND
+_SUBSCRIBE_SLEEP: Delta = MILLISECOND
 
 
 @overload
-@asynccontextmanager
+@enhanced_async_context_manager
 def subscribe(
     redis: Redis,
     channels: MaybeIterable[str],
     queue: Queue[_RedisMessage],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
     output: Literal["raw"],
-    filter_: Callable[[_RedisMessage], bool] | None = None,
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+    filter_: Callable[[bytes], bool] | None = None,
+    error_filter: Callable[[bytes, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> AsyncIterator[Task[None]]: ...
 @overload
-@asynccontextmanager
+@enhanced_async_context_manager
 def subscribe(
     redis: Redis,
     channels: MaybeIterable[str],
     queue: Queue[bytes],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
     output: Literal["bytes"],
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
     filter_: Callable[[bytes], bool] | None = None,
+    error_filter: Callable[[bytes, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> AsyncIterator[Task[None]]: ...
 @overload
-@asynccontextmanager
+@enhanced_async_context_manager
 def subscribe(
     redis: Redis,
     channels: MaybeIterable[str],
     queue: Queue[str],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
     output: Literal["text"] = "text",
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
     filter_: Callable[[str], bool] | None = None,
+    error_filter: Callable[[str, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> AsyncIterator[Task[None]]: ...
 @overload
-@asynccontextmanager
-def subscribe(
+@enhanced_async_context_manager
+def subscribe[T](
     redis: Redis,
     channels: MaybeIterable[str],
-    queue: Queue[_T],
+    queue: Queue[T],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
-    output: Callable[[bytes], _T],
-    filter_: Callable[[_T], bool] | None = None,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
+    output: Callable[[bytes], T],
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+    filter_: Callable[[T], bool] | None = None,
+    error_filter: Callable[[T, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> AsyncIterator[Task[None]]: ...
-@asynccontextmanager
-async def subscribe(
+@enhanced_async_context_manager
+async def subscribe[T](
     redis: Redis,
     channels: MaybeIterable[str],
-    queue: Queue[_RedisMessage] | Queue[bytes] | Queue[_T],
+    queue: Queue[_RedisMessage] | Queue[bytes] | Queue[T],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
-    output: Literal["raw", "bytes", "text"] | Callable[[bytes], _T] = "text",
-    filter_: Callable[[Any], bool] | None = None,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
+    output: Literal["raw", "bytes", "text"] | Callable[[bytes], T] = "text",
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+    filter_: Callable[[T], bool] | None = None,
+    error_filter: Callable[[T, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> AsyncIterator[Task[None]]:
     """Subscribe to the data of a given channel(s)."""
     channels = list(always_iterable(channels)) # skipif-ci-and-not-linux
     match output: # skipif-ci-and-not-linux
         case "raw":
-            transform = cast("Any", identity)
+            transform = cast("Callable[[_RedisMessage], T]", identity)
         case "bytes":
-            transform = cast("Any", itemgetter("data"))
+            transform = cast("Callable[[_RedisMessage], T]", itemgetter("data"))
         case "text":
-
-            def transform(message: _RedisMessage, /) -> str: # pyright: ignore[reportRedeclaration]
-                return message["data"].decode()
-
+            transform = cast("Callable[[_RedisMessage], T]", _decoded_data)
         case Callable() as deserialize:
 
-            def transform(message: _RedisMessage, /) -> _T:
+            def transform(message: _RedisMessage, /) -> T:
                 return deserialize(message["data"])
 
-        case _ as never:
+        case never:
             assert_never(never)
 
     task = create_task( # skipif-ci-and-not-linux
@@ -789,33 +729,44 @@ async def subscribe(
             transform,
             queue,
             timeout=timeout,
-            sleep=sleep,
+            error_transform=error_transform,
             filter_=filter_,
+            error_filter=error_filter,
+            sleep=sleep,
         )
     )
     try: # skipif-ci-and-not-linux
         yield task
     finally: # skipif-ci-and-not-linux
-        _ = task.cancel()
+        try:
+            _ = task.cancel()
+        except RuntimeError as error: # pragma: no cover
+            if (not is_pytest()) or (error.args[0] != "Event loop is closed"):
+                raise
         with suppress(CancelledError):
             await task
 
 
-async def _subscribe_core(
+def _decoded_data(message: _RedisMessage, /) -> str:
+    return message["data"].decode()
+
+
+async def _subscribe_core[T](
     redis: Redis,
     channels: MaybeIterable[str],
-    transform: Callable[[_RedisMessage], Any],
+    transform: Callable[[_RedisMessage], T],
     queue: Queue[Any],
     /,
     *,
-    timeout: Duration | None = _SUBSCRIBE_TIMEOUT,
-    sleep: Duration = _SUBSCRIBE_SLEEP,
-    filter_: Callable[[Any], bool] | None = None,
+    timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+    filter_: Callable[[T], bool] | None = None,
+    error_filter: Callable[[T, Exception], None] | None = None,
+    sleep: Delta = _SUBSCRIBE_SLEEP,
 ) -> None:
     timeout_use = ( # skipif-ci-and-not-linux
-        None if timeout is None else datetime_duration_to_float(timeout)
+        None if timeout is None else (to_nanoseconds(timeout) / 1e9)
    )
-    sleep_use = datetime_duration_to_float(sleep) # skipif-ci-and-not-linux
     is_subscribe_message = partial( # skipif-ci-and-not-linux
         _is_message, channels={c.encode() for c in channels}
    )
@@ -823,30 +774,49 @@ async def _subscribe_core(
         while True:
             message = await pubsub.get_message(timeout=timeout_use)
             if is_subscribe_message(message):
-                transformed = transform(message)
-                if (filter_ is None) or filter_(transformed):
-                    if isinstance(queue, EnhancedQueue):
-                        queue.put_right_nowait(transformed)
-                    else:
-                        queue.put_nowait(transformed)
+                _handle_message(
+                    message,
+                    transform,
+                    queue,
+                    error_transform=error_transform,
+                    filter_=filter_,
+                    error_filter=error_filter,
+                )
             else:
-                await asyncio.sleep(sleep_use)
+                await sleep_td(sleep)
 
 
 def _is_message(
     message: Any, /, *, channels: Collection[bytes]
 ) -> TypeGuard[_RedisMessage]:
-    return (
-        isinstance(message, Mapping)
-        and ("type" in message)
-        and (message["type"] in {"subscribe", "psubscribe", "message", "pmessage"})
-        and ("pattern" in message)
-        and ((message["pattern"] is None) or isinstance(message["pattern"], str))
-        and ("channel" in message)
-        and (message["channel"] in channels)
-        and ("data" in message)
-        and isinstance(message["data"], bytes)
-    )
+    return is_instance_gen(message, _RedisMessage) and (message["channel"] in channels)
+
+
+def _handle_message[T](
+    message: _RedisMessage,
+    transform: Callable[[_RedisMessage], T],
+    queue: Queue[Any],
+    /,
+    *,
+    error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+    filter_: Callable[[T], bool] | None = None,
+    error_filter: Callable[[T, Exception], None] | None = None,
+) -> None:
+    try:
+        transformed = transform(message)
+    except Exception as error: # noqa: BLE001
+        if error_transform is not None:
+            error_transform(message, error)
+        return
+    if filter_ is None:
+        queue.put_nowait(transformed)
+        return
+    try:
+        if filter_(transformed):
+            queue.put_nowait(transformed)
+    except Exception as error: # noqa: BLE001
+        if error_filter is not None:
+            error_filter(transformed, error)
 
 
 class _RedisMessage(TypedDict):
@@ -859,125 +829,7 @@ class _RedisMessage(TypedDict):
 ##
 
 
-@dataclass(kw_only=True)
-class SubscribeService(Looper[_T]):
-    """Service to subscribe to Redis."""
-
-    # base
-    freq: Duration = field(default=MILLISECOND, repr=False)
-    backoff: Duration = field(default=SECOND, repr=False)
-    logger: str | None = field(default=__name__, repr=False)
-    # self
-    redis: Redis
-    channel: str
-    deserializer: Callable[[bytes], _T] = deserialize
-    subscribe_timeout: Duration | None = _SUBSCRIBE_TIMEOUT
-    subscribe_sleep: Duration = _SUBSCRIBE_SLEEP
-    filter_: Callable[[_T], bool] | None = None
-    _is_subscribed: Event = field(default_factory=Event, init=False, repr=False)
-
-    @override
-    async def __aenter__(self) -> Self:
-        _ = await super().__aenter__() # skipif-ci-and-not-linux
-        match self._is_subscribed.is_set(): # skipif-ci-and-not-linux
-            case True:
-                _ = self._debug and self._logger.debug("%s: already subscribing", self)
-            case False:
-                _ = self._debug and self._logger.debug(
-                    "%s: starting subscription...", self
-                )
-                self._is_subscribed.set()
-                _ = await self._stack.enter_async_context(
-                    subscribe(
-                        self.redis,
-                        self.channel,
-                        self._queue,
-                        timeout=self.subscribe_timeout,
-                        sleep=self.subscribe_sleep,
-                        output=self.deserializer,
-                        filter_=self.filter_,
-                    )
-                )
-            case _ as never:
-                assert_never(never)
-        return self # skipif-ci-and-not-linux
-
-    @override
-    async def __aexit__(
-        self,
-        exc_type: type[BaseException] | None = None,
-        exc_value: BaseException | None = None,
-        traceback: TracebackType | None = None,
-    ) -> None:
-        await super().__aexit__( # skipif-ci-and-not-linux
-            exc_type=exc_type, exc_value=exc_value, traceback=traceback
-        )
-        match self._is_subscribed.is_set(): # skipif-ci-and-not-linux
-            case True:
-                _ = self._debug and self._logger.debug(
-                    "%s: stopping subscription...", self
-                )
-                self._is_subscribed.clear()
-            case False:
-                _ = self._debug and self._logger.debug(
-                    "%s: already stopped subscription", self
-                )
-            case _ as never:
-                assert_never(never)
-
-
-##
-
-
-@dataclass(kw_only=True)
-class SubscribeServiceMixin(Generic[_T]):
-    """Mix-in for the subscribe service."""
-
-    # base - looper
-    subscribe_service_freq: Duration = field(default=MILLISECOND, repr=False)
-    subscribe_service_backoff: Duration = field(default=SECOND, repr=False)
-    subscribe_service_empty_upon_exit: bool = field(default=False, repr=False)
-    subscribe_service_logger: str | None = field(default=None, repr=False)
-    subscribe_service_timeout: Duration | None = field(default=None, repr=False)
-    subscribe_service_debug: bool = field(default=False, repr=False)
-    # base - looper
-    subscribe_service_redis: Redis
-    subscribe_service_channel: str
-    subscribe_service_deserializer: Callable[[bytes], _T] = deserialize
-    subscribe_service_subscribe_sleep: Duration = _SUBSCRIBE_SLEEP
-    subscribe_service_subscribe_timeout: Duration | None = _SUBSCRIBE_TIMEOUT
-    # self
-    _subscribe_service: SubscribeService[_T] = field(init=False, repr=False)
-
-    def __post_init__(self) -> None:
-        with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
-            super().__post_init__() # pyright: ignore[reportAttributeAccessIssue]
-        self._subscribe_service = SubscribeService( # skipif-ci-and-not-linux
-            # looper
-            freq=self.subscribe_service_freq,
-            backoff=self.subscribe_service_backoff,
-            empty_upon_exit=self.subscribe_service_empty_upon_exit,
-            logger=self.subscribe_service_logger,
-            timeout=self.subscribe_service_timeout,
-            _debug=self.subscribe_service_debug,
-            # subscribe service
-            redis=self.subscribe_service_redis,
-            channel=self.subscribe_service_channel,
-            deserializer=self.subscribe_service_deserializer,
-            subscribe_sleep=self.subscribe_service_subscribe_sleep,
-            subscribe_timeout=self.subscribe_service_subscribe_timeout,
-        )
-
-    def _yield_sub_loopers(self) -> Iterator[Looper[Any]]:
-        with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
-            yield from super()._yield_sub_loopers() # pyright: ignore[reportAttributeAccessIssue]
-        yield self._subscribe_service # skipif-ci-and-not-linux
-
-
-##
-
-
-@asynccontextmanager
+@enhanced_async_context_manager
 async def yield_pubsub(
     redis: Redis, channels: MaybeIterable[str], /
 ) -> AsyncIterator[PubSub]:
@@ -999,7 +851,7 @@ _HOST = "localhost"
999
851
  _PORT = 6379
1000
852
 
1001
853
 
1002
- @asynccontextmanager
854
+ @enhanced_async_context_manager
1003
855
  async def yield_redis(
1004
856
  *,
1005
857
  host: str = _HOST,
@@ -1037,7 +889,9 @@ async def yield_redis(
 ##
 
 
-def _serialize(obj: _T, /, *, serializer: Callable[[_T], bytes] | None = None) -> bytes:
+def _serialize[T](
+    obj: T, /, *, serializer: Callable[[T], bytes] | None = None
+) -> bytes:
     if serializer is None: # skipif-ci-and-not-linux
         from utilities.orjson import serialize as serializer_use
     else: # skipif-ci-and-not-linux
@@ -1045,9 +899,9 @@ def _serialize(obj: _T, /, *, serializer: Callable[[_T], bytes] | None = None) -
     return serializer_use(obj) # skipif-ci-and-not-linux
 
 
-def _deserialize(
-    data: bytes, /, *, deserializer: Callable[[bytes], _T] | None = None
-) -> _T:
+def _deserialize[T](
+    data: bytes, /, *, deserializer: Callable[[bytes], T] | None = None
+) -> T:
     if deserializer is None: # skipif-ci-and-not-linux
         from utilities.orjson import deserialize as deserializer_use
     else: # skipif-ci-and-not-linux
@@ -1056,13 +910,10 @@ def _deserialize(
 
 
 __all__ = [
-    "PublishService",
-    "PublishServiceMixin",
     "RedisHashMapKey",
     "RedisKey",
-    "SubscribeService",
-    "SubscribeServiceMixin",
     "publish",
+    "publish_many",
     "redis_hash_map_key",
     "redis_key",
     "subscribe",