dycw-utilities 0.127.1__py3-none-any.whl → 0.129.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dycw_utilities-0.127.1.dist-info/METADATA → dycw_utilities-0.129.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dycw-utilities
- Version: 0.127.1
+ Version: 0.129.0
  Author-email: Derek Wan <d.wan@icloud.com>
  License-File: LICENSE
  Requires-Python: >=3.12
dycw_utilities-0.127.1.dist-info/RECORD → dycw_utilities-0.129.0.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
- utilities/__init__.py,sha256=X-YBx01AtGeh8rblelYCi8ZH6Md1qNjdGUJTtLYqEdE,60
+ utilities/__init__.py,sha256=SLPkIGR28QU6Zy5OKMEX-IvxA2c0MLdH8FuPAUwRzrc,60
  utilities/altair.py,sha256=Gpja-flOo-Db0PIPJLJsgzAlXWoKUjPU1qY-DQ829ek,9156
- utilities/asyncio.py,sha256=wKxwNnxdWxsiy5U0b1F3UgpWRHlPKM0y_OcmURzqxR8,51396
+ utilities/asyncio.py,sha256=OIQ4JddpQw8tSubzwDR0WyqQ-uE-L5DdbwuTqRQK5MQ,38202
  utilities/atomicwrites.py,sha256=geFjn9Pwn-tTrtoGjDDxWli9NqbYfy3gGL6ZBctiqSo,5393
  utilities/atools.py,sha256=IYMuFSFGSKyuQmqD6v5IUtDlz8PPw0Sr87Cub_gRU3M,1168
  utilities/cachetools.py,sha256=C1zqOg7BYz0IfQFK8e3qaDDgEZxDpo47F15RTfJM37Q,2910
@@ -11,26 +11,25 @@ utilities/contextvars.py,sha256=RsSGGrbQqqZ67rOydnM7WWIsM2lIE31UHJLejnHJPWY,505
  utilities/cryptography.py,sha256=_CiK_K6c_-uQuUhsUNjNjTL-nqxAh4_1zTfS11Xe120,972
  utilities/cvxpy.py,sha256=Rv1-fD-XYerosCavRF8Pohop2DBkU3AlFaGTfD8AEAA,13776
  utilities/dataclasses.py,sha256=iiC1wpGXWhaocIikzwBt8bbLWyImoUlOlcDZJGejaIg,33011
- utilities/datetime.py,sha256=uYoaOi_C1YtNXGfTN9xlTrW62Re2b1_4Skuv14_MeYQ,38985
+ utilities/datetime.py,sha256=aiPh2OZK2g9gn4yEeSO0lODOmvx8U_rGn6XeSzyk4VY,38738
  utilities/enum.py,sha256=HoRwVCWzsnH0vpO9ZEcAAIZLMv0Sn2vJxxA4sYMQgDs,5793
  utilities/errors.py,sha256=nC7ZYtxxDBMfrTHtT_MByBfup_wfGQFRo3eDt-0ZPe8,1045
  utilities/eventkit.py,sha256=6M5Xu1SzN-juk9PqBHwy5dS-ta7T0qA6SMpDsakOJ0E,13039
- utilities/fastapi.py,sha256=LG1-Q8RDi7wsyVN6v74qptPYX8WGXPkFOQFniMvtzjc,2439
+ utilities/fastapi.py,sha256=gZrXYxKAc7ZEAL_tDmkcbqebkm-KfMCY0X8r-1HF5dI,2962
  utilities/fpdf2.py,sha256=y1NGXR5chWqLXWpewGV3hlRGMr_5yV1lVRkPBhPEgJI,1843
  utilities/functions.py,sha256=jgt592voaHNtX56qX0SRvFveVCRmSIxCZmqvpLZCnY8,27305
  utilities/functools.py,sha256=WrpHt7NLNWSUn9A1Q_ZIWlNaYZOEI4IFKyBG9HO3BC4,1643
  utilities/getpass.py,sha256=DfN5UgMAtFCqS3dSfFHUfqIMZX2shXvwphOz_6J6f6A,103
- utilities/git.py,sha256=wpt5dZ5Oi5931pN24_VLZYaQOvmR0OcQuVtgHzFUN1k,2359
  utilities/hashlib.py,sha256=SVTgtguur0P4elppvzOBbLEjVM3Pea0eWB61yg2ilxo,309
  utilities/http.py,sha256=WcahTcKYRtZ04WXQoWt5EGCgFPcyHD3EJdlMfxvDt-0,946
- utilities/hypothesis.py,sha256=snJ35u9-dXKn3Unac4IPW2V4JtRUg5B7SsDBrQHIx9g,44834
+ utilities/hypothesis.py,sha256=UnUMJmeqwJuK7uyUqw_i3opUYzVKud4RMG0RMOSRBQY,44463
  utilities/importlib.py,sha256=mV1xT_O_zt_GnZZ36tl3xOmMaN_3jErDWY54fX39F6Y,429
  utilities/ipython.py,sha256=V2oMYHvEKvlNBzxDXdLvKi48oUq2SclRg5xasjaXStw,763
  utilities/iterables.py,sha256=mDqw2_0MUVp-P8FklgcaVTi2TXduH0MxbhTDzzhSBho,44915
  utilities/jupyter.py,sha256=ft5JA7fBxXKzP-L9W8f2-wbF0QeYc_2uLQNFDVk4Z-M,2917
  utilities/libcst.py,sha256=Jto5ppzRzsxn4AD32IS8n0lbgLYXwsVJB6EY8giNZyY,4974
  utilities/lightweight_charts.py,sha256=0xNfcsrgFI0R9xL25LtSm-W5yhfBI93qQNT6HyaXAhg,2769
- utilities/logging.py,sha256=gwo3pusPjnWO1ollrtn1VKYyRAQJTue4SkCbMeNvec4,25715
+ utilities/logging.py,sha256=a99gX9oQUe_Oxs5rDtTwUVuOwhRyeO_GfoFNKVaEny0,25641
  utilities/loguru.py,sha256=MEMQVWrdECxk1e3FxGzmOf21vWT9j8CAir98SEXFKPA,3809
  utilities/luigi.py,sha256=fpH9MbxJDuo6-k9iCXRayFRtiVbUtibCJKugf7ygpv0,5988
  utilities/math.py,sha256=-mQgbah-dPJwOEWf3SonrFoVZ2AVxMgpeQ3dfVa-oJA,26764
@@ -43,7 +42,7 @@ utilities/optuna.py,sha256=loyJGWTzljgdJaoLhP09PT8Jz6o_pwBOwehY33lHkhw,1923
  utilities/orjson.py,sha256=AvPFxzJdxC-3PBID3cqdiMyN8FeC7aW9QUgGwbvKuAM,36948
  utilities/os.py,sha256=D_FyyT-6TtqiN9KSS7c9g1fnUtgxmyMtzAjmYLkk46A,3587
  utilities/parse.py,sha256=vsZ2jf_ceSI_Kta9titixufysJaVXh0Whjz1T4awJZw,18938
- utilities/pathlib.py,sha256=31WPMXdLIyXgYOMMl_HOI2wlo66MGSE-cgeelk-Lias,1410
+ utilities/pathlib.py,sha256=0cQpqmZs-Pe2693xZwGFApq-B9mADqhX-pclf_5iLco,3041
  utilities/period.py,sha256=o4wXYEXVlFomop4-Ra4L0yRP4i99NZFjIe_fa7NdZck,11024
  utilities/pickle.py,sha256=Bhvd7cZl-zQKQDFjUerqGuSKlHvnW1K2QXeU5UZibtg,657
  utilities/platform.py,sha256=48IOKx1IC6ZJXWG-b56ZQptITcNFhWRjELW72o2dGTA,2398
@@ -54,21 +53,21 @@ utilities/pqdm.py,sha256=foRytQybmOQ05pjt5LF7ANyzrIa--4ScDE3T2wd31a4,3118
  utilities/psutil.py,sha256=RtbLKOoIJhqrJmEoHDBVeSD-KPzshtS0FtRXBP9_w2s,3751
  utilities/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  utilities/pydantic.py,sha256=f6qtR5mO2YMuyvNmbaEj5YeD9eGA4YYfb7Bjzh9jUs0,1845
- utilities/pyinstrument.py,sha256=OJFDh4o1CWIa4aYPYURdQjgap_nvP45KUsCEe94rQHY,829
+ utilities/pyinstrument.py,sha256=O2dngLsmUUnpMtW1eN3OiM0rGQNBIlXSvZmym_jAsvU,904
  utilities/pyrsistent.py,sha256=wVOVIe_68AAaa-lUE9y-TEzDawVp1uEIc_zfoDgr5ww,2287
  utilities/pytest.py,sha256=KoHSwJbIY2CHtFUlUr_gnEk7z1DVTaldl8RDQ4tDkG4,7837
- utilities/pytest_regressions.py,sha256=-SVT9647Dg6-JcdsiaDKXe3NdOmmrvGevLKWwGjxq3c,5088
- utilities/python_dotenv.py,sha256=iWcnpXbH7S6RoXHiLlGgyuH6udCupAcPd_gQ0eAenQ0,3190
+ utilities/pytest_regressions.py,sha256=YI55B7EtLjhz7zPJZ6NK9bWrxrKCKabWZJe1cwcbA5o,5082
+ utilities/python_dotenv.py,sha256=edXsvHZhZnYeqfMfrsRRpj7_9eJI6uizh3xLx8Q9B3w,3228
  utilities/random.py,sha256=lYdjgxB7GCfU_fwFVl5U-BIM_HV3q6_urL9byjrwDM8,4157
  utilities/re.py,sha256=5J4d8VwIPFVrX2Eb8zfoxImDv7IwiN_U7mJ07wR2Wvs,3958
- utilities/redis.py,sha256=EZgqWeoGpvN-BfCQL93F3rYlfB4U_zhzHCBuZpDmKpo,37157
+ utilities/redis.py,sha256=7Sc-G43VXVzFQU7MHKyI1y3u7My3oT1UoWXPGcKM_-0,36008
  utilities/reprlib.py,sha256=ssYTcBW-TeRh3fhCJv57sopTZHF5FrPyyUg9yp5XBlo,3953
  utilities/scipy.py,sha256=X6ROnHwiUhAmPhM0jkfEh0-Fd9iRvwiqtCQMOLmOQF8,945
  utilities/sentinel.py,sha256=3jIwgpMekWgDAxPDA_hXMP2St43cPhciKN3LWiZ7kv0,1248
  utilities/shelve.py,sha256=HZsMwK4tcIfg3sh0gApx4-yjQnrY4o3V3ZRimvRhoW0,738
- utilities/slack_sdk.py,sha256=jqQyiYSKseZNdg2lCkvPzrAows9p7kVDDjvEnatioKo,5702
+ utilities/slack_sdk.py,sha256=ltmzv68aa73CJGqTDvt8L9vDm22YU9iOCo3NCiNd3vA,4347
  utilities/socket.py,sha256=K77vfREvzoVTrpYKo6MZakol0EYu2q1sWJnnZqL0So0,118
- utilities/sqlalchemy.py,sha256=XTZmNKXD9SUbZ7V1xNNxok-0Ej0Cf4ya5pjuIeH-kdg,39388
+ utilities/sqlalchemy.py,sha256=I81qR7JtS-q1sLnw42p7L0FC0imT98gJHGte_KOjpAA,37890
  utilities/sqlalchemy_polars.py,sha256=s7hQNep2O5DTgIRXyN_JRQma7a4DAtNd25tshaZW8iw,15490
  utilities/statsmodels.py,sha256=koyiBHvpMcSiBfh99wFUfSggLNx7cuAw3rwyfAhoKpQ,3410
  utilities/streamlit.py,sha256=U9PJBaKP1IdSykKhPZhIzSPTZsmLsnwbEPZWzNhJPKk,2955
@@ -80,17 +79,17 @@ utilities/text.py,sha256=ymBFlP_cA8OgNnZRVNs7FAh7OG8HxE6YkiLEMZv5g_A,11297
  utilities/threading.py,sha256=GvBOp4CyhHfN90wGXZuA2VKe9fGzMaEa7oCl4f3nnPU,1009
  utilities/timer.py,sha256=Rkc49KSpHuC8s7vUxGO9DU55U9I6yDKnchsQqrUCVBs,4075
  utilities/traceback.py,sha256=Jg7HS3AwQ-W-msdwHp22_PSHZcR54PbmsSf115B6TSM,27435
- utilities/types.py,sha256=2f1DqTZTMRlpCPWPd9-rh_uwmRPv9UdBoi_Bfv7Ccmo,18374
+ utilities/types.py,sha256=gP04CcCOyFrG7BgblVCsrrChiuO2x842NDVW-GF7odo,18370
  utilities/typing.py,sha256=H6ysJkI830aRwLsMKz0SZIw4cpcsm7d6KhQOwr-SDh0,13817
  utilities/tzdata.py,sha256=yCf70NICwAeazN3_JcXhWvRqCy06XJNQ42j7r6gw3HY,1217
  utilities/tzlocal.py,sha256=3upDNFBvGh1l9njmLR2z2S6K6VxQSb7QizYGUbAH3JU,960
  utilities/uuid.py,sha256=jJTFxz-CWgltqNuzmythB7iEQ-Q1mCwPevUfKthZT3c,611
- utilities/version.py,sha256=QFuyEeQA6jI0ruBEcmhqG36f-etg1AEiD1drBBqhQrs,5358
+ utilities/version.py,sha256=ufhJMmI6KPs1-3wBI71aj5wCukd3sP_m11usLe88DNA,5117
  utilities/warnings.py,sha256=un1LvHv70PU-LLv8RxPVmugTzDJkkGXRMZTE2-fTQHw,1771
  utilities/whenever.py,sha256=jS31ZAY5OMxFxLja_Yo5Fidi87Pd-GoVZ7Vi_teqVDA,16743
  utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
  utilities/zoneinfo.py,sha256=-5j7IQ9nb7gR43rdgA7ms05im-XuqhAk9EJnQBXxCoQ,1874
- dycw_utilities-0.127.1.dist-info/METADATA,sha256=f5sCvR07FCJWNnW1cZFk3qz78HANmQJ3RNl-qnuG_0Q,12803
- dycw_utilities-0.127.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- dycw_utilities-0.127.1.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
- dycw_utilities-0.127.1.dist-info/RECORD,,
+ dycw_utilities-0.129.0.dist-info/METADATA,sha256=FRizi23CYGCVbYnOw9jcaTlerh4kCxHwLLm-Tpi-XR4,12803
+ dycw_utilities-0.129.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ dycw_utilities-0.129.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+ dycw_utilities-0.129.0.dist-info/RECORD,,
utilities/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from __future__ import annotations

- __version__ = "0.127.1"
+ __version__ = "0.129.0"
utilities/asyncio.py CHANGED
@@ -1,9 +1,6 @@
  from __future__ import annotations

- import datetime as dt
- from abc import ABC, abstractmethod
  from asyncio import (
-     CancelledError,
      Event,
      Lock,
      PriorityQueue,
@@ -19,7 +16,7 @@ from asyncio import (
      sleep,
      timeout,
  )
- from collections.abc import Callable, Hashable, Iterable, Iterator, Mapping
+ from collections.abc import Callable, Iterable, Iterator
  from contextlib import (
      AbstractAsyncContextManager,
      AsyncExitStack,
@@ -37,7 +34,6 @@ from typing import (
      TYPE_CHECKING,
      Any,
      Generic,
-     NoReturn,
      Self,
      TextIO,
      TypeVar,
@@ -50,27 +46,19 @@ from typing_extensions import deprecated

  from utilities.dataclasses import replace_non_sentinel
  from utilities.datetime import (
-     MINUTE,
      SECOND,
      datetime_duration_to_float,
-     datetime_duration_to_timedelta,
      get_now,
      round_datetime,
  )
- from utilities.errors import ImpossibleCaseError, repr_error
- from utilities.functions import ensure_int, ensure_not_none, get_class_name
+ from utilities.errors import repr_error
+ from utilities.functions import ensure_int, ensure_not_none
  from utilities.random import SYSTEM_RANDOM
  from utilities.sentinel import Sentinel, sentinel
- from utilities.types import (
-     Coroutine1,
-     DurationOrEveryDuration,
-     MaybeCallableEvent,
-     MaybeType,
-     THashable,
-     TSupportsRichComparison,
- )
+ from utilities.types import MaybeCallableEvent, THashable, TSupportsRichComparison

  if TYPE_CHECKING:
+     import datetime as dt
      from asyncio import _CoroutineLike
      from asyncio.subprocess import Process
      from collections import deque
@@ -319,346 +307,6 @@ class EnhancedTaskGroup(TaskGroup):
  ##


- @dataclass(kw_only=True, unsafe_hash=True)
- class InfiniteLooper(ABC, Generic[THashable]):
-     """An infinite loop which can throw exceptions by setting events."""
-
-     sleep_core: DurationOrEveryDuration = field(default=SECOND, repr=False)
-     sleep_restart: DurationOrEveryDuration = field(default=MINUTE, repr=False)
-     duration: Duration | None = field(default=None, repr=False)
-     logger: str | None = field(default=None, repr=False)
-     _await_upon_aenter: bool = field(default=True, init=False, repr=False)
-     _depth: int = field(default=0, init=False, repr=False)
-     _events: Mapping[THashable | None, Event] = field(
-         default_factory=dict, init=False, repr=False, hash=False
-     )
-     _stack: AsyncExitStack = field(
-         default_factory=AsyncExitStack, init=False, repr=False
-     )
-     _task: Task[None] | None = field(default=None, init=False, repr=False)
-
-     def __post_init__(self) -> None:
-         self._events = {
-             event: Event() for event, _ in self._yield_events_and_exceptions()
-         }
-
-     async def __aenter__(self) -> Self:
-         """Context manager entry."""
-         if self._depth == 0:
-             self._task = create_task(self._run_looper())
-             if self._await_upon_aenter:
-                 with suppress(CancelledError):
-                     await self._task
-         _ = await self._stack.__aenter__()
-         self._depth += 1
-         return self
-
-     async def __aexit__(
-         self,
-         exc_type: type[BaseException] | None = None,
-         exc_value: BaseException | None = None,
-         traceback: TracebackType | None = None,
-     ) -> None:
-         """Context manager exit."""
-         _ = (exc_type, exc_value, traceback)
-         self._depth = max(self._depth - 1, 0)
-         if (self._depth == 0) and (self._task is not None):
-             with suppress(CancelledError):
-                 await self._task
-             self._task = None
-             try:
-                 await self._teardown()
-             except Exception as error:  # noqa: BLE001
-                 self._error_upon_teardown(error)
-         _ = await self._stack.__aexit__(exc_type, exc_value, traceback)
-
-     async def stop(self) -> None:
-         """Stop the service."""
-         if self._task is None:
-             raise ImpossibleCaseError(case=[f"{self._task=}"])  # pragma: no cover
-         with suppress(CancelledError):
-             _ = self._task.cancel()
-
-     async def _run_looper(self) -> None:
-         """Run the looper."""
-         match self.duration:
-             case None:
-                 await self._run_looper_without_timeout()
-             case int() | float() | dt.timedelta() as duration:
-                 try:
-                     async with timeout_dur(duration=duration):
-                         return await self._run_looper_without_timeout()
-                 except TimeoutError:
-                     await self.stop()
-             case _ as never:
-                 assert_never(never)
-         return None
-
-     async def _run_looper_without_timeout(self) -> None:
-         """Run the looper without a timeout."""
-         coroutines = list(self._yield_coroutines())
-         loopers = list(self._yield_loopers())
-         if (len(coroutines) == 0) and (len(loopers) == 0):
-             return await self._run_looper_by_itself()
-         return await self._run_looper_with_others(coroutines, loopers)
-
-     async def _run_looper_by_itself(self) -> None:
-         """Run the looper by itself."""
-         whitelisted = tuple(self._yield_whitelisted_errors())
-         blacklisted = tuple(self._yield_blacklisted_errors())
-         while True:
-             try:
-                 self._reset_events()
-                 try:
-                     await self._initialize()
-                 except Exception as error:  # noqa: BLE001
-                     self._error_upon_initialize(error)
-                     await self._run_sleep(self.sleep_restart)
-                 else:
-                     while True:
-                         try:
-                             event = next(
-                                 key
-                                 for (key, value) in self._events.items()
-                                 if value.is_set()
-                             )
-                         except StopIteration:
-                             await self._core()
-                             await self._run_sleep(self.sleep_core)
-                         else:
-                             self._raise_error(event)
-             except InfiniteLooperError:
-                 raise
-             except BaseException as error1:
-                 match error1:
-                     case Exception():
-                         if isinstance(error1, blacklisted):
-                             raise
-                     case BaseException():
-                         if not isinstance(error1, whitelisted):
-                             raise
-                     case _ as never:
-                         assert_never(never)
-                 self._error_upon_core(error1)
-                 try:
-                     await self._teardown()
-                 except BaseException as error2:  # noqa: BLE001
-                     self._error_upon_teardown(error2)
-                 finally:
-                     await self._run_sleep(self.sleep_restart)
-
-     async def _run_looper_with_others(
-         self,
-         coroutines: Iterable[Callable[[], Coroutine1[None]]],
-         loopers: Iterable[InfiniteLooper[Any]],
-         /,
-     ) -> None:
-         """Run multiple loopers."""
-         while True:
-             self._reset_events()
-             try:
-                 async with TaskGroup() as tg, AsyncExitStack() as stack:
-                     _ = tg.create_task(self._run_looper_by_itself())
-                     _ = [tg.create_task(c()) for c in coroutines]
-                     _ = [
-                         tg.create_task(stack.enter_async_context(lo)) for lo in loopers
-                     ]
-             except ExceptionGroup as error:
-                 self._error_group_upon_others(error)
-                 await self._run_sleep(self.sleep_restart)
-
-     async def _initialize(self) -> None:
-         """Initialize the loop."""
-
-     async def _core(self) -> None:
-         """Run the core part of the loop."""
-
-     async def _teardown(self) -> None:
-         """Tear down the loop."""
-
-     def _error_upon_initialize(self, error: Exception, /) -> None:
-         """Handle any errors upon initializing the looper."""
-         if self.logger is not None:
-             getLogger(name=self.logger).error(
-                 "%r encountered %r whilst initializing; sleeping %s...",
-                 get_class_name(self),
-                 repr_error(error),
-                 self._sleep_restart_desc,
-             )
-
-     def _error_upon_core(self, error: BaseException, /) -> None:
-         """Handle any errors upon running the core function."""
-         if self.logger is not None:
-             getLogger(name=self.logger).error(
-                 "%r encountered %r; sleeping %s...",
-                 get_class_name(self),
-                 repr_error(error),
-                 self._sleep_restart_desc,
-             )
-
-     def _error_upon_teardown(self, error: BaseException, /) -> None:
-         """Handle any errors upon tearing down the looper."""
-         if self.logger is not None:
-             getLogger(name=self.logger).error(
-                 "%r encountered %r whilst tearing down; sleeping %s...",
-                 get_class_name(self),
-                 repr_error(error),
-                 self._sleep_restart_desc,
-             )
-
-     def _error_group_upon_others(self, group: ExceptionGroup, /) -> None:
-         """Handle any errors upon running the core function."""
-         if self.logger is not None:
-             errors = group.exceptions
-             n = len(errors)
-             msgs = [f"{get_class_name(self)!r} encountered {n} error(s):"]
-             msgs.extend(
-                 f"- Error #{i}/{n}: {repr_error(e)}"
-                 for i, e in enumerate(errors, start=1)
-             )
-             msgs.append(f"Sleeping {self._sleep_restart_desc}...")
-             getLogger(name=self.logger).error("\n".join(msgs))
-
-     def _raise_error(self, event: THashable | None, /) -> NoReturn:
-         """Raise the error corresponding to given event."""
-         mapping = dict(self._yield_events_and_exceptions())
-         error = mapping.get(event, InfiniteLooperError)
-         raise error
-
-     def _reset_events(self) -> None:
-         """Reset the events."""
-         self._events = {
-             event: Event() for event, _ in self._yield_events_and_exceptions()
-         }
-
-     async def _run_sleep(self, sleep: DurationOrEveryDuration, /) -> None:
-         """Sleep until the next part of the loop."""
-         match sleep:
-             case int() | float() | dt.timedelta() as duration:
-                 await sleep_dur(duration=duration)
-             case "every", (int() | float() | dt.timedelta()) as duration:
-                 await sleep_until_rounded(duration)
-             case _ as never:
-                 assert_never(never)
-
-     @property
-     def _sleep_restart_desc(self) -> str:
-         """Get a description of the sleep until restart."""
-         match self.sleep_restart:
-             case int() | float() | dt.timedelta() as duration:
-                 timedelta = datetime_duration_to_timedelta(duration)
-                 return f"for {timedelta}"
-             case "every", (int() | float() | dt.timedelta()) as duration:
-                 timedelta = datetime_duration_to_timedelta(duration)
-                 return f"until next {timedelta}"
-             case _ as never:
-                 assert_never(never)
-
-     def _set_event(self, *, event: THashable | None = None) -> None:
-         """Set the given event."""
-         try:
-             event_obj = self._events[event]
-         except KeyError:
-             raise _InfiniteLooperNoSuchEventError(looper=self, event=event) from None
-         event_obj.set()
-
-     def _yield_events_and_exceptions(
-         self,
-     ) -> Iterator[tuple[THashable | None, MaybeType[Exception]]]:
-         """Yield the events & exceptions."""
-         yield (None, _InfiniteLooperDefaultEventError(looper=self))
-
-     def _yield_coroutines(self) -> Iterator[Callable[[], Coroutine1[None]]]:
-         """Yield any other coroutines which must also be run."""
-         yield from []
-
-     def _yield_loopers(self) -> Iterator[InfiniteLooper[Any]]:
-         """Yield any other loopers which must also be run."""
-         yield from []
-
-     def _yield_blacklisted_errors(self) -> Iterator[type[Exception]]:
-         """Yield any exceptions which the looper ought to catch terminate upon."""
-         yield from []
-
-     def _yield_whitelisted_errors(self) -> Iterator[type[BaseException]]:
-         """Yield any exceptions which the looper ought to catch and allow running."""
-         yield from []
-
-
- @dataclass(kw_only=True, slots=True)
- class InfiniteLooperError(Exception):
-     looper: InfiniteLooper[Any]
-
-
- @dataclass(kw_only=True, slots=True)
- class _InfiniteLooperNoSuchEventError(InfiniteLooperError):
-     event: Hashable
-
-     @override
-     def __str__(self) -> str:
-         return f"{get_class_name(self.looper)!r} does not have an event {self.event!r}"
-
-
- @dataclass(kw_only=True, slots=True)
- class _InfiniteLooperDefaultEventError(InfiniteLooperError):
-     @override
-     def __str__(self) -> str:
-         return f"{get_class_name(self.looper)!r} default event error"
-
-
- ##
-
-
- @dataclass(kw_only=True)
- class InfiniteQueueLooper(InfiniteLooper[THashable], Generic[THashable, _T]):
-     """An infinite loop which processes a queue."""
-
-     _await_upon_aenter: bool = field(default=False, init=False, repr=False)
-     _queue: EnhancedQueue[_T] = field(
-         default_factory=EnhancedQueue, init=False, repr=False
-     )
-
-     def __len__(self) -> int:
-         return self._queue.qsize()
-
-     @override
-     async def _core(self) -> None:
-         """Run the core part of the loop."""
-         if self.empty():
-             return
-         await self._process_queue()
-
-     @abstractmethod
-     async def _process_queue(self) -> None:
-         """Process the queue."""
-
-     def empty(self) -> bool:
-         """Check if the queue is empty."""
-         return self._queue.empty()
-
-     def put_left_nowait(self, *items: _T) -> None:
-         """Put items into the queue at the start without blocking."""
-         self._queue.put_left_nowait(*items)  # pragma: no cover
-
-     def put_right_nowait(self, *items: _T) -> None:
-         """Put items into the queue at the end without blocking."""
-         self._queue.put_right_nowait(*items)  # pragma: no cover
-
-     def qsize(self) -> int:
-         """Get the number of items in the queue."""
-         return self._queue.qsize()
-
-     async def run_until_empty(self, *, stop: bool = False) -> None:
-         """Run until the queue is empty."""
-         while not self.empty():
-             await self._process_queue()
-         if stop:
-             await self.stop()
-
-
- ##
-
-
  @dataclass(kw_only=True, slots=True)
  class LooperError(Exception): ...

@@ -1415,9 +1063,6 @@ async def timeout_dur(
  __all__ = [
      "EnhancedQueue",
      "EnhancedTaskGroup",
-     "InfiniteLooper",
-     "InfiniteLooperError",
-     "InfiniteQueueLooper",
      "Looper",
      "LooperError",
      "StreamCommandOutput",
utilities/datetime.py CHANGED
@@ -509,14 +509,6 @@ def get_datetime(*, datetime: MaybeCallableDateTime) -> dt.datetime: ...
  def get_datetime(*, datetime: None) -> None: ...
  @overload
  def get_datetime(*, datetime: Sentinel) -> Sentinel: ...
- @overload
- def get_datetime(
-     *, datetime: MaybeCallableDateTime | Sentinel
- ) -> dt.datetime | Sentinel: ...
- @overload
- def get_datetime(
-     *, datetime: MaybeCallableDateTime | None | Sentinel = sentinel
- ) -> dt.datetime | None | Sentinel: ...
  def get_datetime(
      *, datetime: MaybeCallableDateTime | None | Sentinel = sentinel
  ) -> dt.datetime | None | Sentinel:
utilities/fastapi.py CHANGED
@@ -1,15 +1,18 @@
  from __future__ import annotations

+ from asyncio import Task, create_task
  from dataclasses import InitVar, dataclass, field
- from typing import TYPE_CHECKING, Any, Literal, override
+ from typing import TYPE_CHECKING, Any, Literal, Self, override

  from fastapi import FastAPI
  from uvicorn import Config, Server

- from utilities.asyncio import InfiniteLooper
+ from utilities.asyncio import Looper
  from utilities.datetime import SECOND, datetime_duration_to_float

  if TYPE_CHECKING:
+     from types import TracebackType
+
      from utilities.types import Duration


@@ -36,7 +39,7 @@ class _PingerReceiverApp(FastAPI):


  @dataclass(kw_only=True)
- class PingReceiver(InfiniteLooper):
+ class PingReceiver(Looper[None]):
      """A ping receiver."""

      host: InitVar[str] = _LOCALHOST
@@ -44,12 +47,31 @@ class PingReceiver(InfiniteLooper):
      _app: _PingerReceiverApp = field(
          default_factory=_PingerReceiverApp, init=False, repr=False
      )
-     _await_upon_aenter: bool = field(default=False, init=False, repr=False)
      _server: Server = field(init=False, repr=False)
+     _server_task: Task[None] | None = field(default=None, init=False, repr=False)

+     @override
      def __post_init__(self, host: str, port: int, /) -> None:
+         super().__post_init__()  # skipif-ci
          self._server = Server(Config(self._app, host=host, port=port))  # skipif-ci

+     @override
+     async def __aenter__(self) -> Self:
+         _ = await super().__aenter__()  # skipif-ci
+         async with self._lock:  # skipif-ci
+             self._server_task = create_task(self._server.serve())
+         return self  # skipif-ci
+
+     @override
+     async def __aexit__(
+         self,
+         exc_type: type[BaseException] | None = None,
+         exc_value: BaseException | None = None,
+         traceback: TracebackType | None = None,
+     ) -> None:
+         await super().__aexit__(exc_type, exc_value, traceback)  # skipif-ci
+         await self._server.shutdown()  # skipif-ci
+
      @classmethod
      async def ping(
          cls, port: int, /, *, host: str = _LOCALHOST, timeout: Duration = _TIMEOUT
@@ -66,13 +88,5 @@ class PingReceiver(InfiniteLooper):
              return False
          return response.text if response.status_code == 200 else False  # skipif-ci

-     @override
-     async def _initialize(self) -> None:
-         await self._server.serve()  # skipif-ci
-
-     @override
-     async def _teardown(self) -> None:
-         await self._server.shutdown()  # skipif-ci
-

  __all__ = ["PingReceiver"]
utilities/hypothesis.py CHANGED
@@ -506,13 +506,7 @@ def floats_extra(


  @composite
- def git_repos(
-     draw: DrawFn,
-     /,
-     *,
-     branch: MaybeSearchStrategy[str | None] = None,
-     remote: MaybeSearchStrategy[str | None] = None,
- ) -> Path:
+ def git_repos(draw: DrawFn, /) -> Path:
      path = draw(temp_paths())
      with temp_cwd(path):
          _ = check_call(["git", "init", "-b", "master"])
@@ -525,10 +519,6 @@ def git_repos(
          _ = check_call(["git", "commit", "-m", "add"])
          _ = check_call(["git", "rm", file_str])
          _ = check_call(["git", "commit", "-m", "rm"])
-         if (branch_ := draw2(draw, branch)) is not None:
-             _ = check_call(["git", "checkout", "-b", branch_])
-         if (remote_ := draw2(draw, remote)) is not None:
-             _ = check_call(["git", "remote", "add", "origin", remote_])
      return path

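Note: the `git_repos` strategy no longer accepts `branch`/`remote` arguments; it always yields a freshly initialised repository on `master`. A sketch of a test using the slimmed-down strategy (the assertion is illustrative, not taken from the package's own tests):

    from hypothesis import given
    from utilities.hypothesis import git_repos

    @given(repo=git_repos())
    def test_git_repo(repo) -> None:
        assert (repo / ".git").is_dir()  # `git init -b master` has been run in `repo`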
 
utilities/logging.py CHANGED
@@ -46,9 +46,8 @@ from utilities.datetime import (
      serialize_compact,
  )
  from utilities.errors import ImpossibleCaseError
- from utilities.git import get_repo_root
  from utilities.iterables import OneEmptyError, always_iterable, one
- from utilities.pathlib import ensure_suffix, resolve_path
+ from utilities.pathlib import ensure_suffix, get_path, get_root
  from utilities.reprlib import (
      RICH_EXPAND_ALL,
      RICH_INDENT_SIZE,
@@ -68,9 +67,9 @@ if TYPE_CHECKING:
      from utilities.types import (
          LoggerOrName,
          LogLevel,
+         MaybeCallablePathLike,
          MaybeIterable,
          PathLike,
-         PathLikeOrCallable,
      )
      from utilities.version import MaybeCallableVersionLike

@@ -383,10 +382,10 @@ class StandaloneFileHandler(Handler):

      @override
      def __init__(
-         self, *, level: int = NOTSET, path: PathLikeOrCallable | None = None
+         self, *, level: int = NOTSET, path: MaybeCallablePathLike | None = None
      ) -> None:
          super().__init__(level=level)
-         self._path = path
+         self._path = get_path(path=path)

      @override
      def emit(self, record: LogRecord) -> None:
@@ -394,10 +393,8 @@ class StandaloneFileHandler(Handler):
          from utilities.tzlocal import get_now_local

          try:
-             path = (
-                 resolve_path(path=self._path)
-                 .joinpath(serialize_compact(get_now_local()))
-                 .with_suffix(".txt")
+             path = self._path.joinpath(serialize_compact(get_now_local())).with_suffix(
+                 ".txt"
              )
              formatted = self.format(record)
              with writer(path, overwrite=True) as temp, temp.open(mode="w") as fh:
@@ -473,7 +470,7 @@ class FilterForKeyError(Exception):

  def get_default_logging_path() -> Path:
      """Get the logging default path."""
-     return get_repo_root().joinpath(".logs")
+     return get_root().joinpath(".logs")


  ##
@@ -520,7 +517,7 @@ def setup_logging(
      console_level: LogLevel | None = "INFO",
      console_filters: Iterable[_FilterType] | None = None,
      console_fmt: str = "❯ {_zoned_datetime_str} | {name}:{funcName}:{lineno} | {message}",  # noqa: RUF001
-     files_dir: PathLikeOrCallable | None = get_default_logging_path,
+     files_dir: MaybeCallablePathLike | None = get_default_logging_path,
      files_when: _When = "D",
      files_interval: int = 1,
      files_backup_count: int = 10,
@@ -616,7 +613,7 @@ def setup_logging(
      logger_use.addHandler(console_high_and_exc_handler)

      # debug & info
-     directory = resolve_path(path=files_dir)  # skipif-ci-and-windows
+     directory = get_path(path=files_dir)  # skipif-ci-and-windows
      levels: list[LogLevel] = ["DEBUG", "INFO"]  # skipif-ci-and-windows
      for level, (subpath, files_or_plain_formatter) in product(  # skipif-ci-and-windows
          levels, [(Path(), files_formatter), (Path("plain"), plain_formatter)]
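
Note: `setup_logging` and `StandaloneFileHandler` now take `MaybeCallablePathLike` values, and the default log directory comes from `get_root()` (git toplevel, or the nearest parent containing `.envrc`) rather than `utilities.git.get_repo_root()`. An illustrative call, assuming the remaining keyword arguments keep their defaults:

    from pathlib import Path
    from utilities.logging import setup_logging

    # a Path, a string, or a zero-argument callable are all accepted
    setup_logging(files_dir=lambda: Path("/tmp/my-logs"))
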
utilities/pathlib.py CHANGED
@@ -1,15 +1,21 @@
  from __future__ import annotations

- from contextlib import contextmanager
+ from collections.abc import Callable
+ from contextlib import contextmanager, suppress
+ from dataclasses import dataclass
  from itertools import chain
  from os import chdir
  from pathlib import Path
- from typing import TYPE_CHECKING
+ from re import IGNORECASE, search
+ from subprocess import PIPE, CalledProcessError, check_output
+ from typing import TYPE_CHECKING, assert_never, overload, override
+
+ from utilities.sentinel import Sentinel, sentinel

  if TYPE_CHECKING:
      from collections.abc import Iterator, Sequence

-     from utilities.types import PathLike, PathLikeOrCallable
+     from utilities.types import MaybeCallablePathLike, PathLike

  PWD = Path.cwd()

@@ -25,20 +31,73 @@ def ensure_suffix(path: PathLike, suffix: str, /) -> Path:
      return path.with_name(name)


- def list_dir(path: PathLike, /) -> Sequence[Path]:
-     """List the contents of a directory."""
-     return sorted(Path(path).iterdir())
+ ##


- def resolve_path(*, path: PathLikeOrCallable | None = None) -> Path:
-     """Resolve for a path."""
+ @overload
+ def get_path(*, path: MaybeCallablePathLike | None) -> Path: ...
+ @overload
+ def get_path(*, path: Sentinel) -> Sentinel: ...
+ def get_path(
+     *, path: MaybeCallablePathLike | None | Sentinel = sentinel
+ ) -> Path | None | Sentinel:
+     """Get the path."""
      match path:
+         case Path() | Sentinel():
+             return path
+         case str():
+             return Path(path)
          case None:
              return Path.cwd()
-         case Path() | str():
-             return Path(path)
-         case _:
-             return Path(path())
+         case Callable() as func:
+             return get_path(path=func())
+         case _ as never:
+             assert_never(never)
+
+
+ ##
+
+
+ def get_root(*, path: MaybeCallablePathLike | None = None) -> Path:
+     """Get the root of a path."""
+     path = get_path(path=path)
+     try:
+         output = check_output(
+             ["git", "rev-parse", "--show-toplevel"], stderr=PIPE, cwd=path, text=True
+         )
+     except CalledProcessError as error:
+         # newer versions of git report "Not a git repository", whilst older
+         # versions report "not a git repository"
+         if not search("fatal: not a git repository", error.stderr, flags=IGNORECASE):
+             raise  # pragma: no cover
+     else:
+         return Path(output.strip("\n"))
+     all_paths = list(chain([path], path.parents))
+     with suppress(StopIteration):
+         return next(
+             p for p in all_paths if any(p_i.name == ".envrc" for p_i in p.iterdir())
+         )
+     raise GetRootError(path=path)
+
+
+ @dataclass(kw_only=True, slots=True)
+ class GetRootError(Exception):
+     path: PathLike
+
+     @override
+     def __str__(self) -> str:
+         return f"Unable to determine root from {str(self.path)!r}"
+
+
+ ##
+
+
+ def list_dir(path: PathLike, /) -> Sequence[Path]:
+     """List the contents of a directory."""
+     return sorted(Path(path).iterdir())
+
+
+ ##


  @contextmanager
@@ -52,4 +111,4 @@ def temp_cwd(path: PathLike, /) -> Iterator[None]:
          chdir(prev)


- __all__ = ["ensure_suffix", "list_dir", "resolve_path", "temp_cwd"]
+ __all__ = ["PWD", "ensure_suffix", "get_path", "list_dir", "temp_cwd"]
utilities/pyinstrument.py CHANGED
@@ -7,23 +7,25 @@ from typing import TYPE_CHECKING
  from pyinstrument.profiler import Profiler

  from utilities.datetime import serialize_compact
- from utilities.pathlib import PWD
+ from utilities.pathlib import get_path
  from utilities.tzlocal import get_now_local

  if TYPE_CHECKING:
      from collections.abc import Iterator

-     from utilities.types import PathLike
+     from utilities.types import MaybeCallablePathLike


  @contextmanager
- def profile(*, path: PathLike = PWD) -> Iterator[None]:
+ def profile(*, path: MaybeCallablePathLike | None = Path.cwd) -> Iterator[None]:
      """Profile the contents of a block."""
      from utilities.atomicwrites import writer

      with Profiler() as profiler:
          yield
-     filename = Path(path, f"profile__{serialize_compact(get_now_local())}.html")
+     filename = get_path(path=path).joinpath(
+         f"profile__{serialize_compact(get_now_local())}.html"
+     )
      with writer(filename) as temp, temp.open(mode="w") as fh:
          _ = fh.write(profiler.output_html())
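Note: `profile` now resolves its `path` argument through `get_path`, so it accepts a path, a string, or a zero-argument callable (defaulting to `Path.cwd`). Illustrative use; the profiled body is a placeholder:

    from utilities.pyinstrument import profile

    with profile(path="profiles"):
        run_workload()  # placeholder for the code being profiled
    # writes profiles/profile__<timestamp>.html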
 
utilities/pytest_regressions.py CHANGED
@@ -10,8 +10,8 @@ from pytest import fixture
  from pytest_regressions.file_regression import FileRegressionFixture

  from utilities.functions import ensure_str
- from utilities.git import get_repo_root
  from utilities.operator import is_equal
+ from utilities.pathlib import get_root
  from utilities.pytest import node_id_to_path

  if TYPE_CHECKING:
@@ -153,7 +153,7 @@ def polars_regression(

  def _get_path(request: FixtureRequest, /) -> Path:
      tail = node_id_to_path(request.node.nodeid, head=_PATH_TESTS)
-     return get_repo_root().joinpath(_PATH_TESTS, "regressions", tail)
+     return get_root().joinpath(_PATH_TESTS, "regressions", tail)


  __all__ = [
utilities/python_dotenv.py CHANGED
@@ -2,29 +2,33 @@ from __future__ import annotations

  from dataclasses import dataclass
  from os import environ
+ from pathlib import Path
  from typing import TYPE_CHECKING, override

  from dotenv import dotenv_values

  from utilities.dataclasses import _ParseDataClassMissingValuesError, parse_dataclass
- from utilities.git import get_repo_root
  from utilities.iterables import MergeStrMappingsError, merge_str_mappings
- from utilities.pathlib import PWD
+ from utilities.pathlib import get_root
  from utilities.reprlib import get_repr

  if TYPE_CHECKING:
      from collections.abc import Mapping
      from collections.abc import Set as AbstractSet
-     from pathlib import Path

-     from utilities.types import ParseObjectExtra, PathLike, StrMapping, TDataclass
+     from utilities.types import (
+         MaybeCallablePathLike,
+         ParseObjectExtra,
+         StrMapping,
+         TDataclass,
+     )


  def load_settings(
      cls: type[TDataclass],
      /,
      *,
-     cwd: PathLike = PWD,
+     path: MaybeCallablePathLike | None = Path.cwd,
      globalns: StrMapping | None = None,
      localns: StrMapping | None = None,
      warn_name_errors: bool = False,
@@ -33,7 +37,7 @@ def load_settings(
      extra_parsers: ParseObjectExtra | None = None,
  ) -> TDataclass:
      """Load a set of settings from the `.env` file."""
-     path = get_repo_root(cwd=cwd).joinpath(".env")
+     path = get_root(path=path).joinpath(".env")
      if not path.exists():
          raise _LoadSettingsFileNotFoundError(path=path) from None
      maybe_values_dotenv = dotenv_values(path)
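
Note: `load_settings` now takes `path` (a `MaybeCallablePathLike`, defaulting to `Path.cwd`) instead of `cwd`, and locates the `.env` file at `get_root(path=...)/.env`. A hedged sketch; the settings dataclass is hypothetical:

    from dataclasses import dataclass
    from utilities.python_dotenv import load_settings

    @dataclass(kw_only=True)
    class Settings:
        api_key: str

    settings = load_settings(Settings)  # previously: load_settings(Settings, cwd=...)
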
utilities/redis.py CHANGED
@@ -23,9 +23,8 @@ from typing import (
  )

  from redis.asyncio import Redis
- from redis.typing import EncodableT

- from utilities.asyncio import EnhancedQueue, InfiniteQueueLooper, Looper, timeout_dur
+ from utilities.asyncio import EnhancedQueue, Looper, timeout_dur
  from utilities.contextlib import suppress_super_object_attribute_error
  from utilities.datetime import (
      MILLISECOND,
@@ -34,7 +33,7 @@ from utilities.datetime import (
      datetime_duration_to_timedelta,
  )
  from utilities.errors import ImpossibleCaseError
- from utilities.functions import ensure_int, get_class_name, identity
+ from utilities.functions import ensure_int, identity
  from utilities.iterables import always_iterable, one
  from utilities.orjson import deserialize, serialize

@@ -51,10 +50,10 @@ if TYPE_CHECKING:

      from redis.asyncio import ConnectionPool
      from redis.asyncio.client import PubSub
-     from redis.typing import ResponseT
+     from redis.typing import EncodableT, ResponseT

      from utilities.iterables import MaybeIterable
-     from utilities.types import Duration, MaybeType, TypeLike
+     from utilities.types import Duration, TypeLike


  _K = TypeVar("_K")
@@ -620,42 +619,6 @@ class PublishError(Exception):
  ##


- @dataclass(kw_only=True)
- class Publisher(InfiniteQueueLooper[None, tuple[str, EncodableT]]):
-     """Publish a set of messages to Redis."""
-
-     redis: Redis
-     serializer: Callable[[Any], EncodableT] | None = None
-     timeout: Duration = _PUBLISH_TIMEOUT
-
-     @override
-     async def _process_queue(self) -> None:
-         for item in self._queue.get_all_nowait():  # skipif-ci-and-not-linux
-             channel, data = item
-             _ = await publish(
-                 self.redis,
-                 channel,
-                 data,
-                 serializer=self.serializer,
-                 timeout=self.timeout,
-             )
-
-     @override
-     def _yield_events_and_exceptions(
-         self,
-     ) -> Iterator[tuple[None, MaybeType[Exception]]]:
-         yield (None, PublisherError)  # skipif-ci-and-not-linux
-
-
- @dataclass(kw_only=True)
- class PublisherError(Exception):
-     publisher: Publisher
-
-     @override
-     def __str__(self) -> str:
-         return f"Error running {get_class_name(self.publisher)!r}"  # skipif-ci-and-not-linux
-
-
  @dataclass(kw_only=True)
  class PublishService(Looper[tuple[str, _T]]):
      """Service to publish items to Redis."""
@@ -1095,8 +1058,6 @@ def _deserialize(
  __all__ = [
      "PublishService",
      "PublishServiceMixin",
-     "Publisher",
-     "PublisherError",
      "RedisHashMapKey",
      "RedisKey",
      "SubscribeService",
utilities/slack_sdk.py CHANGED
@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any, Self, override

  from slack_sdk.webhook.async_client import AsyncWebhookClient

- from utilities.asyncio import InfiniteQueueLooper, Looper, timeout_dur
+ from utilities.asyncio import Looper, timeout_dur
  from utilities.datetime import MINUTE, SECOND, datetime_duration_to_float
  from utilities.functools import cache
  from utilities.math import safe_round
@@ -27,53 +27,10 @@ _TIMEOUT: Duration = MINUTE
  ##


- _SLEEP: Duration = SECOND
-
-
  async def _send_adapter(url: str, text: str, /) -> None:
      await send_to_slack(url, text)  # pragma: no cover


- @dataclass(init=False, unsafe_hash=True)
- class SlackHandler(Handler, InfiniteQueueLooper[None, str]):
-     """Handler for sending messages to Slack."""
-
-     @override
-     def __init__(
-         self,
-         url: str,
-         /,
-         *,
-         level: int = NOTSET,
-         sleep_core: Duration = _SLEEP,
-         sleep_restart: Duration = _SLEEP,
-         sender: Callable[[str, str], Coroutine1[None]] = _send_adapter,
-         timeout: Duration = _TIMEOUT,
-     ) -> None:
-         InfiniteQueueLooper.__init__(self)  # InfiniteQueueLooper first
-         InfiniteQueueLooper.__post_init__(self)
-         Handler.__init__(self, level=level)  # Handler next
-         self.url = url
-         self.sender = sender
-         self.timeout = timeout
-         self.sleep_core = sleep_core
-         self.sleep_restart = sleep_restart
-
-     @override
-     def emit(self, record: LogRecord) -> None:
-         try:
-             self.put_right_nowait(self.format(record))
-         except Exception:  # noqa: BLE001 # pragma: no cover
-             self.handleError(record)
-
-     @override
-     async def _process_queue(self) -> None:
-         messages = self._queue.get_all_nowait()
-         text = "\n".join(messages)
-         async with timeout_dur(duration=self.timeout):
-             await self.sender(self.url, text)
-
-
  @dataclass(init=False, unsafe_hash=True)
  class SlackHandlerService(Handler, Looper[str]):
      """Service to send messages to Slack."""
@@ -187,4 +144,4 @@ def _get_client(url: str, /, *, timeout: Duration = _TIMEOUT) -> AsyncWebhookCli
      return AsyncWebhookClient(url, timeout=timeout_use)


- __all__ = ["SendToSlackError", "SlackHandler", "SlackHandlerService", "send_to_slack"]
+ __all__ = ["SendToSlackError", "SlackHandlerService", "send_to_slack"]
utilities/sqlalchemy.py CHANGED
@@ -57,7 +57,7 @@ from sqlalchemy.orm import (
  from sqlalchemy.orm.exc import UnmappedClassError
  from sqlalchemy.pool import NullPool, Pool

- from utilities.asyncio import InfiniteQueueLooper, Looper, timeout_dur
+ from utilities.asyncio import Looper, timeout_dur
  from utilities.contextlib import suppress_super_object_attribute_error
  from utilities.datetime import SECOND
  from utilities.functions import (
@@ -82,13 +82,7 @@ from utilities.iterables import (
  )
  from utilities.reprlib import get_repr
  from utilities.text import snake_case
- from utilities.types import (
-     Duration,
-     MaybeIterable,
-     MaybeType,
-     StrMapping,
-     TupleOrStrMapping,
- )
+ from utilities.types import Duration, MaybeIterable, StrMapping, TupleOrStrMapping

  _T = TypeVar("_T")
  type _EngineOrConnectionOrAsync = Engine | Connection | AsyncEngine | AsyncConnection
@@ -610,52 +604,6 @@ class TablenameMixin:
  ##


- @dataclass(kw_only=True)
- class Upserter(InfiniteQueueLooper[None, _InsertItem]):
-     """Upsert a set of items to a database."""
-
-     engine: AsyncEngine
-     snake: bool = False
-     selected_or_all: _SelectedOrAll = "selected"
-     chunk_size_frac: float = CHUNK_SIZE_FRAC
-     assume_tables_exist: bool = False
-     timeout_create: Duration | None = None
-     error_create: type[Exception] = TimeoutError
-     timeout_insert: Duration | None = None
-     error_insert: type[Exception] = TimeoutError
-
-     @override
-     async def _process_queue(self) -> None:
-         items = self._queue.get_all_nowait()
-         await upsert_items(
-             self.engine,
-             *items,
-             snake=self.snake,
-             selected_or_all=self.selected_or_all,
-             chunk_size_frac=self.chunk_size_frac,
-             assume_tables_exist=self.assume_tables_exist,
-             timeout_create=self.timeout_create,
-             error_create=self.error_create,
-             timeout_insert=self.timeout_insert,
-             error_insert=self.error_insert,
-         )
-
-     @override
-     def _yield_events_and_exceptions(
-         self,
-     ) -> Iterator[tuple[None, MaybeType[Exception]]]:
-         yield (None, UpserterError)
-
-
- @dataclass(kw_only=True)
- class UpserterError(Exception):
-     upserter: Upserter
-
-     @override
-     def __str__(self) -> str:
-         return f"Error running {get_class_name(self.upserter)!r}"
-
-
  @dataclass(kw_only=True)
  class UpsertService(Looper[_InsertItem]):
      """Service to upsert items to a database."""
@@ -1202,8 +1150,6 @@ __all__ = [
      "UpsertItemsError",
      "UpsertService",
      "UpsertServiceMixin",
-     "Upserter",
-     "UpserterError",
      "check_engine",
      "columnwise_max",
      "columnwise_min",
utilities/types.py CHANGED
@@ -241,8 +241,8 @@ type SerializeObjectExtra = Mapping[Any, Callable[[Any], str]]


  # pathlib
+ type MaybeCallablePathLike = MaybeCallable[PathLike]
  type PathLike = MaybeStr[Path]
- type PathLikeOrCallable = PathLike | Callable[[], PathLike]


  # random
@@ -282,6 +282,7 @@ __all__ = [
      "MaybeCallableDate",
      "MaybeCallableDateTime",
      "MaybeCallableEvent",
+     "MaybeCallablePathLike",
      "MaybeCoroutine1",
      "MaybeIterable",
      "MaybeIterableHashable",
@@ -293,7 +294,6 @@ __all__ = [
      "Parallelism",
      "ParseObjectExtra",
      "PathLike",
-     "PathLikeOrCallable",
      "RoundMode",
      "Seed",
      "SerializeObjectExtra",
utilities/version.py CHANGED
@@ -137,14 +137,6 @@ def get_version(*, version: MaybeCallableVersionLike) -> Version: ...
  def get_version(*, version: None) -> None: ...
  @overload
  def get_version(*, version: Sentinel) -> Sentinel: ...
- @overload
- def get_version(
-     *, version: MaybeCallableVersionLike | Sentinel
- ) -> Version | Sentinel: ...
- @overload
- def get_version(
-     *, version: MaybeCallableVersionLike | None | Sentinel = sentinel
- ) -> Version | None | Sentinel: ...
  def get_version(
      *, version: MaybeCallableVersionLike | None | Sentinel = sentinel
  ) -> Version | None | Sentinel:
utilities/git.py DELETED
@@ -1,93 +0,0 @@
- from __future__ import annotations
-
- from dataclasses import dataclass
- from pathlib import Path
- from re import IGNORECASE, search
- from subprocess import PIPE, CalledProcessError, check_call, check_output
- from typing import TYPE_CHECKING, override
-
- from utilities.pathlib import PWD
-
- if TYPE_CHECKING:
-     from utilities.types import PathLike
-
-
- def fetch_all_tags(*, cwd: PathLike = PWD) -> None:
-     """Fetch the tags."""
-     _ = check_call(["git", "fetch", "--all", "--tags"], cwd=cwd)
-
-
- ##
-
-
- def get_branch_name(*, cwd: PathLike = PWD) -> str:
-     """Get the current branch name."""
-     output = check_output(
-         _GIT_REV_PARSE_ABBREV_REV_HEAD, stderr=PIPE, cwd=cwd, text=True
-     )
-     return output.strip("\n")
-
-
- _GIT_REV_PARSE_ABBREV_REV_HEAD = ["git", "rev-parse", "--abbrev-ref", "HEAD"]
-
-
- ##
-
-
- def get_ref_tags(ref: str, /, *, cwd: PathLike = PWD) -> list[str]:
-     """Get the tags of a reference."""
-     output = check_output([*_GIT_TAG_POINTS_AT, ref], stderr=PIPE, cwd=cwd, text=True)
-     return output.strip("\n").splitlines()
-
-
- _GIT_TAG_POINTS_AT = ["git", "tag", "--points-at"]
-
-
- ##
-
-
- def get_repo_name(*, cwd: PathLike = PWD) -> str:
-     """Get the repo name."""
-     output = check_output(_GIT_REMOTE_GET_URL_ORIGIN, stderr=PIPE, cwd=cwd, text=True)
-     return Path(output.strip("\n")).stem  # not valid_path
-
-
- _GIT_REMOTE_GET_URL_ORIGIN = ["git", "remote", "get-url", "origin"]
-
-
- ##
-
-
- def get_repo_root(*, cwd: PathLike = PWD) -> Path:
-     """Get the repo root."""
-     try:
-         output = check_output(
-             ["git", "rev-parse", "--show-toplevel"], stderr=PIPE, cwd=cwd, text=True
-         )
-     except CalledProcessError as error:
-         # newer versions of git report "Not a git repository", whilst older
-         # versions report "not a git repository"
-         if search("fatal: not a git repository", error.stderr, flags=IGNORECASE):
-             raise GetRepoRootError(cwd=cwd) from error
-         raise  # pragma: no cover
-     else:
-         return Path(output.strip("\n"))
-
-
- @dataclass(kw_only=True, slots=True)
- class GetRepoRootError(Exception):
-     cwd: PathLike
-
-     @override
-     def __str__(self) -> str:
-         return f"Path is not part of a `git` repository: {self.cwd}"
-
-
- __all__ = [
-     "GetRepoRootError",
-     "fetch_all_tags",
-     "get_branch_name",
-     "get_ref_tags",
-     "get_repo_name",
-     "get_repo_root",
- ]
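
Note: with `utilities/git.py` deleted, only the repo-root lookup has a direct replacement in this release (`utilities.pathlib.get_root`); `fetch_all_tags`, `get_branch_name`, `get_ref_tags` and `get_repo_name` have no successor in this diff. Migration sketch for the surviving piece:

    from utilities.pathlib import get_root

    root = get_root(path=".")  # was: utilities.git.get_repo_root(cwd=".")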