dycw-utilities 0.135.0__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of dycw-utilities might be problematic.

Files changed (97)
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.178.1.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +13 -10
  7. utilities/asyncio.py +312 -787
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +195 -77
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +28 -59
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +24 -269
  23. utilities/git.py +9 -30
  24. utilities/grp.py +28 -0
  25. utilities/gzip.py +31 -0
  26. utilities/http.py +3 -2
  27. utilities/hypothesis.py +513 -159
  28. utilities/importlib.py +17 -1
  29. utilities/inflect.py +12 -4
  30. utilities/iterables.py +33 -58
  31. utilities/jinja2.py +148 -0
  32. utilities/json.py +70 -0
  33. utilities/libcst.py +38 -17
  34. utilities/lightweight_charts.py +4 -7
  35. utilities/logging.py +136 -93
  36. utilities/math.py +8 -4
  37. utilities/more_itertools.py +43 -45
  38. utilities/operator.py +27 -27
  39. utilities/orjson.py +189 -36
  40. utilities/os.py +61 -4
  41. utilities/packaging.py +115 -0
  42. utilities/parse.py +8 -5
  43. utilities/pathlib.py +269 -40
  44. utilities/permissions.py +298 -0
  45. utilities/platform.py +7 -6
  46. utilities/polars.py +1205 -413
  47. utilities/polars_ols.py +1 -1
  48. utilities/postgres.py +408 -0
  49. utilities/pottery.py +43 -19
  50. utilities/pqdm.py +3 -3
  51. utilities/psutil.py +5 -57
  52. utilities/pwd.py +28 -0
  53. utilities/pydantic.py +4 -52
  54. utilities/pydantic_settings.py +240 -0
  55. utilities/pydantic_settings_sops.py +76 -0
  56. utilities/pyinstrument.py +7 -7
  57. utilities/pytest.py +104 -143
  58. utilities/pytest_plugins/__init__.py +1 -0
  59. utilities/pytest_plugins/pytest_randomly.py +23 -0
  60. utilities/pytest_plugins/pytest_regressions.py +56 -0
  61. utilities/pytest_regressions.py +26 -46
  62. utilities/random.py +11 -6
  63. utilities/re.py +1 -1
  64. utilities/redis.py +220 -343
  65. utilities/sentinel.py +10 -0
  66. utilities/shelve.py +4 -1
  67. utilities/shutil.py +25 -0
  68. utilities/slack_sdk.py +35 -104
  69. utilities/sqlalchemy.py +496 -471
  70. utilities/sqlalchemy_polars.py +29 -54
  71. utilities/string.py +2 -3
  72. utilities/subprocess.py +1977 -0
  73. utilities/tempfile.py +112 -4
  74. utilities/testbook.py +50 -0
  75. utilities/text.py +174 -42
  76. utilities/throttle.py +158 -0
  77. utilities/timer.py +2 -2
  78. utilities/traceback.py +70 -35
  79. utilities/types.py +102 -30
  80. utilities/typing.py +479 -19
  81. utilities/uuid.py +42 -5
  82. utilities/version.py +27 -26
  83. utilities/whenever.py +1559 -361
  84. utilities/zoneinfo.py +80 -22
  85. dycw_utilities-0.135.0.dist-info/METADATA +0 -39
  86. dycw_utilities-0.135.0.dist-info/RECORD +0 -96
  87. dycw_utilities-0.135.0.dist-info/WHEEL +0 -4
  88. dycw_utilities-0.135.0.dist-info/licenses/LICENSE +0 -21
  89. utilities/aiolimiter.py +0 -25
  90. utilities/arq.py +0 -216
  91. utilities/eventkit.py +0 -388
  92. utilities/luigi.py +0 -183
  93. utilities/period.py +0 -152
  94. utilities/pudb.py +0 -62
  95. utilities/python_dotenv.py +0 -101
  96. utilities/streamlit.py +0 -105
  97. utilities/typed_settings.py +0 -123
utilities/redis.py CHANGED
@@ -1,16 +1,15 @@
  from __future__ import annotations

- from asyncio import CancelledError, Event, Queue, Task, create_task
- from collections.abc import AsyncIterator, Callable, Mapping
- from contextlib import asynccontextmanager, suppress
- from dataclasses import dataclass, field
+ from asyncio import CancelledError, Queue, Task, TaskGroup, create_task
+ from collections.abc import AsyncIterator, Callable, Mapping, Sequence
+ from contextlib import suppress
+ from dataclasses import dataclass
  from functools import partial
  from operator import itemgetter
  from typing import (
  TYPE_CHECKING,
  Any,
  Literal,
- Self,
  TypedDict,
  TypeGuard,
  assert_never,
@@ -21,35 +20,27 @@ from typing import (

  from redis.asyncio import Redis

- from utilities.asyncio import EnhancedQueue, Looper, sleep_td, timeout_td
- from utilities.contextlib import suppress_super_object_attribute_error
+ from utilities.asyncio import sleep_td, timeout_td
+ from utilities.contextlib import enhanced_async_context_manager
  from utilities.errors import ImpossibleCaseError
  from utilities.functions import ensure_int, identity
  from utilities.iterables import always_iterable, one
- from utilities.orjson import deserialize, serialize
- from utilities.whenever import MILLISECOND, SECOND
+ from utilities.os import is_pytest
+ from utilities.typing import is_instance_gen
+ from utilities.whenever import MILLISECOND, SECOND, to_milliseconds, to_nanoseconds

  if TYPE_CHECKING:
- from collections.abc import (
- AsyncIterator,
- Awaitable,
- Collection,
- Iterable,
- Iterator,
- Sequence,
- )
- from types import TracebackType
+ from collections.abc import AsyncIterator, Awaitable, Collection, Iterable

  from redis.asyncio import ConnectionPool
  from redis.asyncio.client import PubSub
- from redis.typing import EncodableT, ResponseT
- from whenever import TimeDelta
+ from redis.typing import EncodableT

  from utilities.iterables import MaybeIterable
- from utilities.types import MaybeType, TypeLike
+ from utilities.types import Delta, MaybeSequence, MaybeType, TypeLike


- _PUBLISH_TIMEOUT: TimeDelta = SECOND
+ _PUBLISH_TIMEOUT: Delta = SECOND


  ##
@@ -66,9 +57,9 @@ class RedisHashMapKey[K, V]:
  value: TypeLike[V]
  value_serializer: Callable[[V], bytes] | None = None
  value_deserializer: Callable[[bytes], V] | None = None
- timeout: TimeDelta | None = None
+ timeout: Delta | None = None
  error: MaybeType[BaseException] = TimeoutError
- ttl: TimeDelta | None = None
+ ttl: Delta | None = None

  async def delete(self, redis: Redis, key: K, /) -> int:
  """Delete a key from a hashmap in `redis`."""
@@ -172,7 +163,7 @@ class RedisHashMapKey[K, V]:
  "Awaitable[int]", redis.hset(self.name, mapping=cast("Any", ser))
  )
  if self.ttl is not None:
- await redis.pexpire(self.name, self.ttl.py_timedelta())
+ await redis.pexpire(self.name, to_milliseconds(self.ttl))
  return result # skipif-ci-and-not-linux

  async def values(self, redis: Redis, /) -> Sequence[V]:
@@ -197,9 +188,9 @@ def redis_hash_map_key[K, V](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V], bytes] | None = None,
  value_deserializer: Callable[[bytes], V] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K, V]: ...
  @overload
  def redis_hash_map_key[K, V1, V2](
@@ -212,9 +203,9 @@ def redis_hash_map_key[K, V1, V2](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K, V1 | V2]: ...
  @overload
  def redis_hash_map_key[K, V1, V2, V3](
@@ -227,9 +218,9 @@ def redis_hash_map_key[K, V1, V2, V3](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K, V1 | V2 | V3]: ...
  @overload
  def redis_hash_map_key[K1, K2, V](
@@ -242,9 +233,9 @@ def redis_hash_map_key[K1, K2, V](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V], bytes] | None = None,
  value_deserializer: Callable[[bytes], V] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2, V]: ...
  @overload
  def redis_hash_map_key[K1, K2, V1, V2](
@@ -257,9 +248,9 @@ def redis_hash_map_key[K1, K2, V1, V2](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2, V1 | V2]: ...
  @overload
  def redis_hash_map_key[K1, K2, V1, V2, V3](
@@ -272,9 +263,9 @@ def redis_hash_map_key[K1, K2, V1, V2, V3](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2, V1 | V2 | V3]: ...
  @overload
  def redis_hash_map_key[K1, K2, K3, V](
@@ -287,9 +278,9 @@ def redis_hash_map_key[K1, K2, K3, V](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V], bytes] | None = None,
  value_deserializer: Callable[[bytes], V] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2 | K3, V]: ...
  @overload
  def redis_hash_map_key[K1, K2, K3, V1, V2](
@@ -302,9 +293,9 @@ def redis_hash_map_key[K1, K2, K3, V1, V2](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2 | K3, V1 | V2]: ...
  @overload
  def redis_hash_map_key[K1, K2, K3, V1, V2, V3](
@@ -317,9 +308,9 @@ def redis_hash_map_key[K1, K2, K3, V1, V2, V3](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K1 | K2 | K3, V1 | V2 | V3]: ...
  @overload
  def redis_hash_map_key[K, K1, K2, K3, V, V1, V2, V3](
@@ -332,9 +323,9 @@ def redis_hash_map_key[K, K1, K2, K3, V, V1, V2, V3](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[V1 | V2 | V3], bytes] | None = None,
  value_deserializer: Callable[[bytes], V1 | V2 | V3] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisHashMapKey[K, V]: ...
  def redis_hash_map_key[K, V](
  name: str,
@@ -346,9 +337,9 @@ def redis_hash_map_key[K, V](
  key_deserializer: Callable[[bytes], Any] | None = None,
  value_serializer: Callable[[Any], bytes] | None = None,
  value_deserializer: Callable[[bytes], Any] | None = None,
- timeout: TimeDelta | None = None,
- ttl: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
+ timeout: Delta | None = None,
+ ttl: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
  ) -> RedisHashMapKey[K, V]:
  """Create a redis key."""
  return RedisHashMapKey( # skipif-ci-and-not-linux
@@ -376,16 +367,17 @@ class RedisKey[T]:
  type: TypeLike[T]
  serializer: Callable[[T], bytes] | None = None
  deserializer: Callable[[bytes], T] | None = None
- timeout: TimeDelta | None = None
+ timeout: Delta | None = None
  error: MaybeType[BaseException] = TimeoutError
- ttl: TimeDelta | None = None
+ ttl: Delta | None = None

  async def delete(self, redis: Redis, /) -> int:
  """Delete the key from `redis`."""
  async with timeout_td( # skipif-ci-and-not-linux
  self.timeout, error=self.error
  ):
- return ensure_int(await redis.delete(self.name))
+ response = await redis.delete(self.name)
+ return ensure_int(response)

  async def exists(self, redis: Redis, /) -> bool:
  """Check if the key exists in `redis`."""
@@ -396,7 +388,7 @@ class RedisKey[T]:
  match result: # skipif-ci-and-not-linux
  case 0 | 1 as value:
  return bool(value)
- case _ as never:
+ case never:
  assert_never(never)

  async def get(self, redis: Redis, /) -> T:
@@ -415,15 +407,15 @@ class RedisKey[T]:
  """Set a value in `redis`."""
  ser = _serialize(value, serializer=self.serializer) # skipif-ci-and-not-linux
  ttl = ( # skipif-ci-and-not-linux
- None if self.ttl is None else round(self.ttl.in_milliseconds())
+ None if self.ttl is None else to_milliseconds(self.ttl)
  )
  async with timeout_td( # skipif-ci-and-not-linux
  self.timeout, error=self.error
  ):
- result = await redis.set( # skipif-ci-and-not-linux
+ response = await redis.set( # skipif-ci-and-not-linux
  self.name, ser, px=ttl
  )
- return ensure_int(result) # skipif-ci-and-not-linux
+ return ensure_int(response) # skipif-ci-and-not-linux


  @overload
@@ -434,9 +426,9 @@ def redis_key[T](
  *,
  serializer: Callable[[T], bytes] | None = None,
  deserializer: Callable[[bytes], T] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T]: ...
  @overload
  def redis_key[T1, T2](
@@ -446,9 +438,9 @@ def redis_key[T1, T2](
  *,
  serializer: Callable[[T1 | T2], bytes] | None = None,
  deserializer: Callable[[bytes], T1 | T2] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T1 | T2]: ...
  @overload
  def redis_key[T1, T2, T3](
@@ -458,9 +450,9 @@ def redis_key[T1, T2, T3](
  *,
  serializer: Callable[[T1 | T2 | T3], bytes] | None = None,
  deserializer: Callable[[bytes], T1 | T2 | T3] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T1 | T2 | T3]: ...
  @overload
  def redis_key[T1, T2, T3, T4](
@@ -470,9 +462,9 @@ def redis_key[T1, T2, T3, T4](
  *,
  serializer: Callable[[T1 | T2 | T3 | T4], bytes] | None = None,
  deserializer: Callable[[bytes], T1 | T2 | T3 | T4] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T1 | T2 | T3 | T4]: ...
  @overload
  def redis_key[T1, T2, T3, T4, T5](
@@ -482,9 +474,9 @@ def redis_key[T1, T2, T3, T4, T5](
  *,
  serializer: Callable[[T1 | T2 | T3 | T4 | T5], bytes] | None = None,
  deserializer: Callable[[bytes], T1 | T2 | T3 | T4 | T5] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T1 | T2 | T3 | T4 | T5]: ...
  @overload
  def redis_key[T, T1, T2, T3, T4, T5](
@@ -494,9 +486,9 @@ def redis_key[T, T1, T2, T3, T4, T5](
  *,
  serializer: Callable[[T1 | T2 | T3 | T4 | T5], bytes] | None = None,
  deserializer: Callable[[bytes], T1 | T2 | T3 | T4 | T5] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T]: ...
  def redis_key[T](
  name: str,
@@ -505,9 +497,9 @@ def redis_key[T](
  *,
  serializer: Callable[[Any], bytes] | None = None,
  deserializer: Callable[[bytes], Any] | None = None,
- timeout: TimeDelta | None = None,
- error: type[Exception] = TimeoutError,
- ttl: TimeDelta | None = None,
+ timeout: Delta | None = None,
+ error: MaybeType[BaseException] = TimeoutError,
+ ttl: Delta | None = None,
  ) -> RedisKey[T]:
  """Create a redis key."""
  return RedisKey( # skipif-ci-and-not-linux
@@ -532,8 +524,8 @@ async def publish[T](
  /,
  *,
  serializer: Callable[[T], EncodableT],
- timeout: TimeDelta = _PUBLISH_TIMEOUT,
- ) -> ResponseT: ...
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> int: ...
  @overload
  async def publish(
  redis: Redis,
@@ -542,8 +534,8 @@ async def publish(
  /,
  *,
  serializer: None = None,
- timeout: TimeDelta = _PUBLISH_TIMEOUT,
- ) -> ResponseT: ...
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> int: ...
  @overload
  async def publish[T](
  redis: Redis,
@@ -552,8 +544,8 @@ async def publish[T](
  /,
  *,
  serializer: Callable[[T], EncodableT] | None = None,
- timeout: TimeDelta = _PUBLISH_TIMEOUT,
- ) -> ResponseT: ...
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> int: ...
  async def publish[T](
  redis: Redis,
  channel: str,
@@ -561,198 +553,173 @@ async def publish[T](
  /,
  *,
  serializer: Callable[[T], EncodableT] | None = None,
- timeout: TimeDelta = _PUBLISH_TIMEOUT,
- ) -> ResponseT:
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> int:
  """Publish an object to a channel."""
  match data, serializer: # skipif-ci-and-not-linux
  case bytes() | str() as data_use, _:
  ...
  case _, None:
- raise PublishError(data=data, serializer=serializer)
+ raise PublishError(data=data)
  case _, Callable():
  data_use = serializer(data)
- case _ as never:
+ case never:
  assert_never(never)
  async with timeout_td(timeout): # skipif-ci-and-not-linux
- return await redis.publish(channel, data_use) # skipif-ci-and-not-linux
+ response = await redis.publish(channel, data_use) # skipif-ci-and-not-linux
+ return ensure_int(response) # skipif-ci-and-not-linux


  @dataclass(kw_only=True, slots=True)
  class PublishError(Exception):
  data: Any
- serializer: Callable[[Any], EncodableT] | None = None

  @override
  def __str__(self) -> str:
- return (
- f"Unable to publish data {self.data!r} with serializer {self.serializer!r}"
- )
+ return f"Unable to publish data {self.data!r} with no serializer"


  ##


- @dataclass(kw_only=True)
- class PublishService[T](Looper[tuple[str, T]]):
- """Service to publish items to Redis."""
-
- # base
- freq: TimeDelta = field(default=MILLISECOND, repr=False)
- backoff: TimeDelta = field(default=SECOND, repr=False)
- empty_upon_exit: bool = field(default=True, repr=False)
- # self
- redis: Redis
- serializer: Callable[[T], EncodableT] = serialize
- publish_timeout: TimeDelta = _PUBLISH_TIMEOUT
-
- @override
- async def core(self) -> None:
- await super().core() # skipif-ci-and-not-linux
- while not self.empty(): # skipif-ci-and-not-linux
- channel, data = self.get_left_nowait()
- _ = await publish(
- self.redis,
- channel,
- data,
- serializer=self.serializer,
- timeout=self.publish_timeout,
+ async def publish_many[T](
+ redis: Redis,
+ channel: str,
+ data: MaybeSequence[bytes | str | T],
+ /,
+ *,
+ serializer: Callable[[T], EncodableT] | None = None,
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> Sequence[bool]:
+ """Publish an object/multiple objects to a channel."""
+ async with TaskGroup() as tg:
+ tasks = [
+ tg.create_task(
+ _try_publish(
+ redis,
+ channel,
+ d,
+ serializer=cast("Callable[[Any], EncodableT]", serializer),
+ timeout=timeout,
+ )
  )
+ for d in always_iterable(data)
+ ]
+ return [t.result() for t in tasks]


- ##
-
-
- @dataclass(kw_only=True)
- class PublishServiceMixin[T]:
- """Mix-in for the publish service."""
-
- # base - looper
- publish_service_freq: TimeDelta = field(default=MILLISECOND, repr=False)
- publish_service_backoff: TimeDelta = field(default=SECOND, repr=False)
- publish_service_empty_upon_exit: bool = field(default=False, repr=False)
- publish_service_logger: str | None = field(default=None, repr=False)
- publish_service_timeout: TimeDelta | None = field(default=None, repr=False)
- publish_service_debug: bool = field(default=False, repr=False)
- _is_pending_restart: Event = field(default_factory=Event, init=False, repr=False)
- # base - publish service
- publish_service_redis: Redis
- publish_service_serializer: Callable[[T], EncodableT] = serialize
- publish_service_publish_timeout: TimeDelta = _PUBLISH_TIMEOUT
- # self
- _publish_service: PublishService[T] = field(init=False, repr=False)
-
- def __post_init__(self) -> None:
- with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
- super().__post_init__() # pyright: ignore[reportAttributeAccessIssue]
- self._publish_service = PublishService( # skipif-ci-and-not-linux
- # looper
- freq=self.publish_service_freq,
- backoff=self.publish_service_backoff,
- empty_upon_exit=self.publish_service_empty_upon_exit,
- logger=self.publish_service_logger,
- timeout=self.publish_service_timeout,
- _debug=self.publish_service_debug,
- # publish service
- redis=self.publish_service_redis,
- serializer=self.publish_service_serializer,
- publish_timeout=self.publish_service_publish_timeout,
- )
-
- def _yield_sub_loopers(self) -> Iterator[Looper[Any]]:
- with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
- yield from super()._yield_sub_loopers() # pyright: ignore[reportAttributeAccessIssue]
- yield self._publish_service # skipif-ci-and-not-linux
+ async def _try_publish[T](
+ redis: Redis,
+ channel: str,
+ data: bytes | str | T,
+ /,
+ *,
+ serializer: Callable[[T], EncodableT] | None = None,
+ timeout: Delta | None = _PUBLISH_TIMEOUT,
+ ) -> bool:
+ try:
+ _ = await publish(redis, channel, data, serializer=serializer, timeout=timeout)
+ except TimeoutError:
+ return False
+ return True


  ##


- _SUBSCRIBE_TIMEOUT: TimeDelta = SECOND
- _SUBSCRIBE_SLEEP: TimeDelta = MILLISECOND
+ _SUBSCRIBE_TIMEOUT: Delta = SECOND
+ _SUBSCRIBE_SLEEP: Delta = MILLISECOND


  @overload
- @asynccontextmanager
+ @enhanced_async_context_manager
  def subscribe(
  redis: Redis,
  channels: MaybeIterable[str],
  queue: Queue[_RedisMessage],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
  output: Literal["raw"],
- filter_: Callable[[_RedisMessage], bool] | None = None,
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+ filter_: Callable[[bytes], bool] | None = None,
+ error_filter: Callable[[bytes, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> AsyncIterator[Task[None]]: ...
  @overload
- @asynccontextmanager
+ @enhanced_async_context_manager
  def subscribe(
  redis: Redis,
  channels: MaybeIterable[str],
  queue: Queue[bytes],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
  output: Literal["bytes"],
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
  filter_: Callable[[bytes], bool] | None = None,
+ error_filter: Callable[[bytes, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> AsyncIterator[Task[None]]: ...
  @overload
- @asynccontextmanager
+ @enhanced_async_context_manager
  def subscribe(
  redis: Redis,
  channels: MaybeIterable[str],
  queue: Queue[str],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
  output: Literal["text"] = "text",
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
  filter_: Callable[[str], bool] | None = None,
+ error_filter: Callable[[str, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> AsyncIterator[Task[None]]: ...
  @overload
- @asynccontextmanager
+ @enhanced_async_context_manager
  def subscribe[T](
  redis: Redis,
  channels: MaybeIterable[str],
  queue: Queue[T],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
  output: Callable[[bytes], T],
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
  filter_: Callable[[T], bool] | None = None,
+ error_filter: Callable[[T, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> AsyncIterator[Task[None]]: ...
- @asynccontextmanager
+ @enhanced_async_context_manager
  async def subscribe[T](
  redis: Redis,
  channels: MaybeIterable[str],
  queue: Queue[_RedisMessage] | Queue[bytes] | Queue[T],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
  output: Literal["raw", "bytes", "text"] | Callable[[bytes], T] = "text",
- filter_: Callable[[Any], bool] | None = None,
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+ filter_: Callable[[T], bool] | None = None,
+ error_filter: Callable[[T, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> AsyncIterator[Task[None]]:
  """Subscribe to the data of a given channel(s)."""
  channels = list(always_iterable(channels)) # skipif-ci-and-not-linux
  match output: # skipif-ci-and-not-linux
  case "raw":
- transform = cast("Any", identity)
+ transform = cast("Callable[[_RedisMessage], T]", identity)
  case "bytes":
- transform = cast("Any", itemgetter("data"))
+ transform = cast("Callable[[_RedisMessage], T]", itemgetter("data"))
  case "text":
-
- def transform(message: _RedisMessage, /) -> str: # pyright: ignore[reportRedeclaration]
- return message["data"].decode()
-
+ transform = cast("Callable[[_RedisMessage], T]", _decoded_data)
  case Callable() as deserialize:

  def transform(message: _RedisMessage, /) -> T:
  return deserialize(message["data"])

- case _ as never:
+ case never:
  assert_never(never)

  task = create_task( # skipif-ci-and-not-linux
@@ -762,31 +729,43 @@ async def subscribe[T](
  transform,
  queue,
  timeout=timeout,
- sleep=sleep,
+ error_transform=error_transform,
  filter_=filter_,
+ error_filter=error_filter,
+ sleep=sleep,
  )
  )
  try: # skipif-ci-and-not-linux
  yield task
  finally: # skipif-ci-and-not-linux
- _ = task.cancel()
+ try:
+ _ = task.cancel()
+ except RuntimeError as error: # pragma: no cover
+ if (not is_pytest()) or (error.args[0] != "Event loop is closed"):
+ raise
  with suppress(CancelledError):
  await task


- async def _subscribe_core(
+ def _decoded_data(message: _RedisMessage, /) -> str:
+ return message["data"].decode()
+
+
+ async def _subscribe_core[T](
  redis: Redis,
  channels: MaybeIterable[str],
- transform: Callable[[_RedisMessage], Any],
+ transform: Callable[[_RedisMessage], T],
  queue: Queue[Any],
  /,
  *,
- timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT,
- sleep: TimeDelta = _SUBSCRIBE_SLEEP,
- filter_: Callable[[Any], bool] | None = None,
+ timeout: Delta | None = _SUBSCRIBE_TIMEOUT,
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+ filter_: Callable[[T], bool] | None = None,
+ error_filter: Callable[[T, Exception], None] | None = None,
+ sleep: Delta = _SUBSCRIBE_SLEEP,
  ) -> None:
  timeout_use = ( # skipif-ci-and-not-linux
- None if timeout is None else timeout.in_seconds()
+ None if timeout is None else (to_nanoseconds(timeout) / 1e9)
  )
  is_subscribe_message = partial( # skipif-ci-and-not-linux
  _is_message, channels={c.encode() for c in channels}
@@ -795,12 +774,14 @@ async def _subscribe_core(
  while True:
  message = await pubsub.get_message(timeout=timeout_use)
  if is_subscribe_message(message):
- transformed = transform(message)
- if (filter_ is None) or filter_(transformed):
- if isinstance(queue, EnhancedQueue):
- queue.put_right_nowait(transformed)
- else:
- queue.put_nowait(transformed)
+ _handle_message(
+ message,
+ transform,
+ queue,
+ error_transform=error_transform,
+ filter_=filter_,
+ error_filter=error_filter,
+ )
  else:
  await sleep_td(sleep)

@@ -808,17 +789,34 @@
  def _is_message(
  message: Any, /, *, channels: Collection[bytes]
  ) -> TypeGuard[_RedisMessage]:
- return (
- isinstance(message, Mapping)
- and ("type" in message)
- and (message["type"] in {"subscribe", "psubscribe", "message", "pmessage"})
- and ("pattern" in message)
- and ((message["pattern"] is None) or isinstance(message["pattern"], str))
- and ("channel" in message)
- and (message["channel"] in channels)
- and ("data" in message)
- and isinstance(message["data"], bytes)
- )
+ return is_instance_gen(message, _RedisMessage) and (message["channel"] in channels)
+
+
+ def _handle_message[T](
+ message: _RedisMessage,
+ transform: Callable[[_RedisMessage], T],
+ queue: Queue[Any],
+ /,
+ *,
+ error_transform: Callable[[_RedisMessage, Exception], None] | None = None,
+ filter_: Callable[[T], bool] | None = None,
+ error_filter: Callable[[T, Exception], None] | None = None,
+ ) -> None:
+ try:
+ transformed = transform(message)
+ except Exception as error: # noqa: BLE001
+ if error_transform is not None:
+ error_transform(message, error)
+ return
+ if filter_ is None:
+ queue.put_nowait(transformed)
+ return
+ try:
+ if filter_(transformed):
+ queue.put_nowait(transformed)
+ except Exception as error: # noqa: BLE001
+ if error_filter is not None:
+ error_filter(transformed, error)


  class _RedisMessage(TypedDict):
@@ -831,125 +829,7 @@ class _RedisMessage(TypedDict):
  ##


- @dataclass(kw_only=True)
- class SubscribeService[T](Looper[T]):
- """Service to subscribe to Redis."""
-
- # base
- freq: TimeDelta = field(default=MILLISECOND, repr=False)
- backoff: TimeDelta = field(default=SECOND, repr=False)
- logger: str | None = field(default=__name__, repr=False)
- # self
- redis: Redis
- channel: str
- deserializer: Callable[[bytes], T] = deserialize
- subscribe_timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT
- subscribe_sleep: TimeDelta = _SUBSCRIBE_SLEEP
- filter_: Callable[[T], bool] | None = None
- _is_subscribed: Event = field(default_factory=Event, init=False, repr=False)
-
- @override
- async def __aenter__(self) -> Self:
- _ = await super().__aenter__() # skipif-ci-and-not-linux
- match self._is_subscribed.is_set(): # skipif-ci-and-not-linux
- case True:
- _ = self._debug and self._logger.debug("%s: already subscribing", self)
- case False:
- _ = self._debug and self._logger.debug(
- "%s: starting subscription...", self
- )
- self._is_subscribed.set()
- _ = await self._stack.enter_async_context(
- subscribe(
- self.redis,
- self.channel,
- self._queue,
- timeout=self.subscribe_timeout,
- sleep=self.subscribe_sleep,
- output=self.deserializer,
- filter_=self.filter_,
- )
- )
- case _ as never:
- assert_never(never)
- return self # skipif-ci-and-not-linux
-
- @override
- async def __aexit__(
- self,
- exc_type: type[BaseException] | None = None,
- exc_value: BaseException | None = None,
- traceback: TracebackType | None = None,
- ) -> None:
- await super().__aexit__( # skipif-ci-and-not-linux
- exc_type=exc_type, exc_value=exc_value, traceback=traceback
- )
- match self._is_subscribed.is_set(): # skipif-ci-and-not-linux
- case True:
- _ = self._debug and self._logger.debug(
- "%s: stopping subscription...", self
- )
- self._is_subscribed.clear()
- case False:
- _ = self._debug and self._logger.debug(
- "%s: already stopped subscription", self
- )
- case _ as never:
- assert_never(never)
-
-
- ##
-
-
- @dataclass(kw_only=True)
- class SubscribeServiceMixin[T]:
- """Mix-in for the subscribe service."""
-
- # base - looper
- subscribe_service_freq: TimeDelta = field(default=MILLISECOND, repr=False)
- subscribe_service_backoff: TimeDelta = field(default=SECOND, repr=False)
- subscribe_service_empty_upon_exit: bool = field(default=False, repr=False)
- subscribe_service_logger: str | None = field(default=None, repr=False)
- subscribe_service_timeout: TimeDelta | None = field(default=None, repr=False)
- subscribe_service_debug: bool = field(default=False, repr=False)
- # base - looper
- subscribe_service_redis: Redis
- subscribe_service_channel: str
- subscribe_service_deserializer: Callable[[bytes], T] = deserialize
- subscribe_service_subscribe_sleep: TimeDelta = _SUBSCRIBE_SLEEP
- subscribe_service_subscribe_timeout: TimeDelta | None = _SUBSCRIBE_TIMEOUT
- # self
- _subscribe_service: SubscribeService[T] = field(init=False, repr=False)
-
- def __post_init__(self) -> None:
- with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
- super().__post_init__() # pyright: ignore[reportAttributeAccessIssue]
- self._subscribe_service = SubscribeService( # skipif-ci-and-not-linux
- # looper
- freq=self.subscribe_service_freq,
- backoff=self.subscribe_service_backoff,
- empty_upon_exit=self.subscribe_service_empty_upon_exit,
- logger=self.subscribe_service_logger,
- timeout=self.subscribe_service_timeout,
- _debug=self.subscribe_service_debug,
- # subscribe service
- redis=self.subscribe_service_redis,
- channel=self.subscribe_service_channel,
- deserializer=self.subscribe_service_deserializer,
- subscribe_sleep=self.subscribe_service_subscribe_sleep,
- subscribe_timeout=self.subscribe_service_subscribe_timeout,
- )
-
- def _yield_sub_loopers(self) -> Iterator[Looper[Any]]:
- with suppress_super_object_attribute_error(): # skipif-ci-and-not-linux
- yield from super()._yield_sub_loopers() # pyright: ignore[reportAttributeAccessIssue]
- yield self._subscribe_service # skipif-ci-and-not-linux
-
-
- ##
-
-
- @asynccontextmanager
+ @enhanced_async_context_manager
  async def yield_pubsub(
  redis: Redis, channels: MaybeIterable[str], /
  ) -> AsyncIterator[PubSub]:
@@ -971,7 +851,7 @@ _HOST = "localhost"
  _PORT = 6379


- @asynccontextmanager
+ @enhanced_async_context_manager
  async def yield_redis(
  *,
  host: str = _HOST,
@@ -1030,13 +910,10 @@ def _deserialize[T](


  __all__ = [
- "PublishService",
- "PublishServiceMixin",
  "RedisHashMapKey",
  "RedisKey",
- "SubscribeService",
- "SubscribeServiceMixin",
  "publish",
+ "publish_many",
  "redis_hash_map_key",
  "redis_key",
  "subscribe",