dycw-utilities 0.117.1__py3-none-any.whl → 0.119.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dycw_utilities-0.117.1.dist-info → dycw_utilities-0.119.0.dist-info}/METADATA +27 -27
- {dycw_utilities-0.117.1.dist-info → dycw_utilities-0.119.0.dist-info}/RECORD +17 -17
- utilities/__init__.py +1 -1
- utilities/asyncio.py +2 -224
- utilities/click.py +19 -19
- utilities/datetime.py +5 -5
- utilities/fastapi.py +3 -8
- utilities/hypothesis.py +44 -44
- utilities/orjson.py +2 -2
- utilities/period.py +2 -2
- utilities/redis.py +1 -18
- utilities/slack_sdk.py +2 -68
- utilities/sqlalchemy.py +1 -44
- utilities/whenever.py +66 -104
- utilities/zoneinfo.py +3 -3
- {dycw_utilities-0.117.1.dist-info → dycw_utilities-0.119.0.dist-info}/WHEEL +0 -0
- {dycw_utilities-0.117.1.dist-info → dycw_utilities-0.119.0.dist-info}/licenses/LICENSE +0 -0
utilities/datetime.py
CHANGED
@@ -457,7 +457,7 @@ class EnsureMonthError(Exception):
 
 
 def format_datetime_local_and_utc(datetime: dt.datetime, /) -> str:
-    """Format a local datetime locally & in UTC."""
+    """Format a plain datetime locally & in UTC."""
     time_zone = ensure_time_zone(datetime)
     if time_zone is UTC:
         return datetime.strftime("%Y-%m-%d %H:%M:%S (%a, UTC)")
@@ -706,8 +706,8 @@ def is_integral_timedelta(duration: Duration, /) -> bool:
 ##
 
 
-def is_local_datetime(obj: Any, /) -> TypeGuard[dt.datetime]:
-    """Check if an object is a local datetime."""
+def is_plain_datetime(obj: Any, /) -> TypeGuard[dt.datetime]:
+    """Check if an object is a plain datetime."""
     return isinstance(obj, dt.datetime) and (obj.tzinfo is None)
 
 
@@ -1105,7 +1105,7 @@ class SerializeCompactError(Exception):
 
     @override
     def __str__(self) -> str:
-        return f"Unable to serialize local datetime {self.datetime}"
+        return f"Unable to serialize plain datetime {self.datetime}"
 
 
 def parse_date_compact(text: str, /) -> dt.date:
@@ -1388,7 +1388,7 @@ __all__ = [
     "get_today",
     "get_years",
     "is_integral_timedelta",
-    "is_local_datetime",
+    "is_plain_datetime",
     "is_weekday",
     "is_zero_time",
     "is_zoned_datetime",
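Illustrative sketch, not part of the diff: is_local_datetime is renamed to is_plain_datetime, and the unchanged body still returns True only for tz-naive datetime objects. The values below are made up.

import datetime as dt

from utilities.datetime import is_plain_datetime  # previously is_local_datetime

naive = dt.datetime(2024, 1, 1, 12, 0)         # tzinfo is None -> "plain"
aware = naive.replace(tzinfo=dt.timezone.utc)  # tz-aware -> not plain

assert is_plain_datetime(naive)
assert not is_plain_datetime(aware)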
utilities/fastapi.py
CHANGED
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, Literal, override
 from fastapi import FastAPI
 from uvicorn import Config, Server
 
-from utilities.asyncio import AsyncService
+from utilities.asyncio import InfiniteLooper
 from utilities.datetime import SECOND, datetime_duration_to_float
 
 if TYPE_CHECKING:
@@ -36,7 +36,7 @@ class _PingerReceiverApp(FastAPI):
 
 
 @dataclass(kw_only=True)
-class PingReceiver(AsyncService):
+class PingReceiver(InfiniteLooper):
     """A ping receiver."""
 
     host: InitVar[str] = _LOCALHOST
@@ -67,13 +67,8 @@ class PingReceiver(AsyncService):
         return response.text if response.status_code == 200 else False  # skipif-ci
 
     @override
-    async def
+    async def _initialize(self) -> None:
         await self._server.serve()  # skipif-ci
 
-    @override
-    async def stop(self) -> None:
-        await self._server.shutdown()  # skipif-ci
-        await super().stop()  # skipif-ci
-
 
 __all__ = ["PingReceiver"]
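A minimal sketch, not part of the diff, of the pattern the change above moves to: subclasses of InfiniteLooper override _initialize rather than the old AsyncService start/stop pair. The EchoLooper class and its field are hypothetical.

from dataclasses import dataclass
from typing import override

from utilities.asyncio import InfiniteLooper


@dataclass(kw_only=True)
class EchoLooper(InfiniteLooper):  # hypothetical example, mirroring PingReceiver
    name: str = "echo"

    @override
    async def _initialize(self) -> None:
        # startup work goes here; no explicit stop()/shutdown() override is needed
        print(f"{self.name}: initialized")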
utilities/hypothesis.py
CHANGED
@@ -642,43 +642,6 @@ def lists_fixed_length(
 ##
 
 
-@composite
-def local_datetimes(
-    draw: DrawFn,
-    /,
-    *,
-    min_value: MaybeSearchStrategy[dt.datetime] = DATETIME_MIN_NAIVE,
-    max_value: MaybeSearchStrategy[dt.datetime] = DATETIME_MAX_NAIVE,
-    round_: RoundMode | None = None,
-    timedelta: dt.timedelta | None = None,
-    rel_tol: float | None = None,
-    abs_tol: float | None = None,
-) -> dt.datetime:
-    """Strategy for generating local datetimes."""
-    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    datetime = draw(datetimes(min_value=min_value_, max_value=max_value_))
-    if round_ is not None:
-        if timedelta is None:
-            raise LocalDateTimesError(round_=round_)
-        datetime = round_datetime(
-            datetime, timedelta, mode=round_, rel_tol=rel_tol, abs_tol=abs_tol
-        )
-    _ = assume(min_value_ <= datetime <= max_value_)
-    return datetime
-
-
-@dataclass(kw_only=True, slots=True)
-class LocalDateTimesError(Exception):
-    round_: RoundMode
-
-    @override
-    def __str__(self) -> str:
-        return "Rounding requires a timedelta; got None"
-
-
-##
-
-
 @composite
 def min_and_max_datetimes(
     draw: DrawFn,
@@ -991,6 +954,43 @@ def paths() -> SearchStrategy[Path]:
 ##
 
 
+@composite
+def plain_datetimes(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[dt.datetime] = DATETIME_MIN_NAIVE,
+    max_value: MaybeSearchStrategy[dt.datetime] = DATETIME_MAX_NAIVE,
+    round_: RoundMode | None = None,
+    timedelta: dt.timedelta | None = None,
+    rel_tol: float | None = None,
+    abs_tol: float | None = None,
+) -> dt.datetime:
+    """Strategy for generating plain datetimes."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    datetime = draw(datetimes(min_value=min_value_, max_value=max_value_))
+    if round_ is not None:
+        if timedelta is None:
+            raise PlainDateTimesError(round_=round_)
+        datetime = round_datetime(
+            datetime, timedelta, mode=round_, rel_tol=rel_tol, abs_tol=abs_tol
+        )
+    _ = assume(min_value_ <= datetime <= max_value_)
+    return datetime
+
+
+@dataclass(kw_only=True, slots=True)
+class PlainDateTimesError(Exception):
+    round_: RoundMode
+
+    @override
+    def __str__(self) -> str:
+        return "Rounding requires a timedelta; got None"
+
+
+##
+
+
 @composite
 def random_states(
     draw: DrawFn, /, *, seed: MaybeSearchStrategy[int | None] = None
@@ -1478,7 +1478,7 @@ def zoned_datetimes(
 ) -> dt.datetime:
     """Strategy for generating zoned datetimes."""
     from utilities.whenever import (
-
+        CheckValidZonedDateTimeError,
         check_valid_zoned_datetime,
     )
 
@@ -1496,7 +1496,7 @@ def zoned_datetimes(
         max_value_ = max_value_.astimezone(time_zone_)
     try:
         datetime = draw(
-            local_datetimes(
+            plain_datetimes(
                 min_value=min_value_.replace(tzinfo=None),
                 max_value=max_value_.replace(tzinfo=None),
                 round_=round_,
@@ -1505,13 +1505,13 @@ def zoned_datetimes(
                 abs_tol=abs_tol,
             )
         )
-    except LocalDateTimesError as error:
+    except PlainDateTimesError as error:
         raise ZonedDateTimesError(round_=error.round_) from None
     datetime = datetime.replace(tzinfo=time_zone_)
     _ = assume(min_value_ <= datetime <= max_value_)
     if valid:
         with assume_does_not_raise(  # skipif-ci-and-windows
-
+            CheckValidZonedDateTimeError
         ):
             check_valid_zoned_datetime(datetime)
     return datetime
@@ -1528,8 +1528,8 @@ class ZonedDateTimesError(Exception):
 
 __all__ = [
     "Draw2Error",
-    "LocalDateTimesError",
     "MaybeSearchStrategy",
+    "PlainDateTimesError",
     "Shape",
     "ZonedDateTimesError",
     "assume_does_not_raise",
@@ -1548,8 +1548,6 @@ __all__ = [
     "int64s",
     "int_arrays",
     "lists_fixed_length",
-    "local_datetimes",
-    "local_datetimes",
     "min_and_max_datetimes",
     "min_and_maybe_max_datetimes",
     "min_and_maybe_max_sizes",
@@ -1559,6 +1557,8 @@ __all__ = [
     "numbers",
     "pairs",
     "paths",
+    "plain_datetimes",
+    "plain_datetimes",
     "random_states",
     "sentinels",
    "sets_fixed_length",
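Illustrative usage, not part of the diff: the strategy formerly exposed as local_datetimes is now plain_datetimes, with the same parameters. The test below is a made-up example.

import datetime as dt

from hypothesis import given

from utilities.hypothesis import plain_datetimes  # previously local_datetimes


@given(datetime=plain_datetimes())
def test_draws_naive_datetimes(datetime: dt.datetime) -> None:
    # plain (naive) datetimes carry no tzinfo
    assert datetime.tzinfo is None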
utilities/orjson.py
CHANGED
@@ -48,7 +48,7 @@ from utilities.uuid import UUID_PATTERN
 from utilities.version import Version, parse_version
 from utilities.whenever import (
     parse_date,
-    parse_local_datetime,
+    parse_plain_datetime,
     parse_time,
     parse_timedelta,
     parse_zoned_datetime,
@@ -424,7 +424,7 @@ def _object_hook(
     if match := _FLOAT_PATTERN.search(text):
         return float(match.group(1))
     if match := _LOCAL_DATETIME_PATTERN.search(text):
-        return parse_local_datetime(match.group(1))
+        return parse_plain_datetime(match.group(1))
     if match := _PATH_PATTERN.search(text):
         return Path(match.group(1))
     if match := _TIME_PATTERN.search(text):
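A hedged sketch, not part of the diff, of the renamed whenever helper that _object_hook now calls; the input format shown (an ISO-style string without an offset) is an assumption.

from utilities.whenever import parse_plain_datetime  # previously parse_local_datetime

# _object_hook passes the text captured by _LOCAL_DATETIME_PATTERN;
# an ISO-style plain datetime string is assumed here for illustration
parsed = parse_plain_datetime("2024-01-01T12:00:00")  # expected: a tz-naive datetime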
utilities/period.py
CHANGED
@@ -23,7 +23,7 @@ from utilities.sentinel import Sentinel, sentinel
 from utilities.typing import is_instance_gen
 from utilities.whenever import (
     serialize_date,
-    serialize_local_datetime,
+    serialize_plain_datetime,
     serialize_zoned_datetime,
 )
 from utilities.zoneinfo import EnsureTimeZoneError, ensure_time_zone
@@ -137,7 +137,7 @@ class Period(Generic[_TPeriod]):
            )
            return f"{cls}({start}, {end})"
        start, end = (
-           serialize_local_datetime(t.replace(tzinfo=None))
+           serialize_plain_datetime(t.replace(tzinfo=None))
            for t in [result.start, result.end]
        )
        return f"{cls}({start}, {end}, {time_zone})"
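A hedged sketch, not part of the diff, of the repr path changed above: zoned endpoints are stripped of their tzinfo and serialized as plain datetimes. The endpoint values are made up, and serialize_plain_datetime's exact output format is not shown in this diff.

import datetime as dt
from zoneinfo import ZoneInfo

from utilities.whenever import serialize_plain_datetime  # previously serialize_local_datetime

start = dt.datetime(2024, 1, 1, 9, 0, tzinfo=ZoneInfo("Europe/London"))
end = dt.datetime(2024, 1, 1, 17, 0, tzinfo=ZoneInfo("Europe/London"))

# mirrors Period.__repr__ above: drop tzinfo, then serialize the plain datetime
start_str, end_str = (
    serialize_plain_datetime(t.replace(tzinfo=None)) for t in (start, end)
)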
utilities/redis.py
CHANGED
@@ -22,7 +22,7 @@ from redis.asyncio import Redis
 from redis.asyncio.client import PubSub
 from redis.typing import EncodableT
 
-from utilities.asyncio import InfiniteQueueLooper, QueueProcessor, timeout_dur
+from utilities.asyncio import InfiniteQueueLooper, timeout_dur
 from utilities.datetime import (
     MILLISECOND,
     SECOND,
@@ -588,22 +588,6 @@ async def publish(
 ##
 
 
-@dataclass(kw_only=True)
-class Publisher(QueueProcessor[tuple[str, EncodableT]]):
-    """Publish a set of messages to Redis."""
-
-    redis: Redis
-    serializer: Callable[[Any], EncodableT] | None = None
-    timeout: Duration = _PUBLISH_TIMEOUT
-
-    @override
-    async def _process_item(self, item: tuple[str, EncodableT], /) -> None:
-        channel, data = item  # skipif-ci-and-not-linux
-        _ = await publish(  # skipif-ci-and-not-linux
-            self.redis, channel, data, serializer=self.serializer, timeout=self.timeout
-        )
-
-
 @dataclass(kw_only=True)
 class PublisherIQL(InfiniteQueueLooper[None, tuple[str, EncodableT]]):
     """Publish a set of messages to Redis."""
@@ -828,7 +812,6 @@ _ = _TestRedis
 
 
 __all__ = [
-    "Publisher",
     "PublisherIQL",
     "PublisherIQLError",
     "RedisHashMapKey",
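A hedged migration sketch, not part of the diff: the QueueProcessor-based Publisher is removed, leaving PublisherIQL as the queue-backed publisher. The constructor field below is an assumption carried over from the deleted Publisher dataclass.

from redis.asyncio import Redis

from utilities.redis import PublisherIQL  # Publisher (QueueProcessor-based) was removed

redis = Redis()  # localhost:6379 by default
# redis= is an assumed field, mirroring the deleted Publisher above
publisher = PublisherIQL(redis=redis)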
utilities/slack_sdk.py
CHANGED
@@ -3,18 +3,12 @@ from __future__ import annotations
 from asyncio import Queue
 from dataclasses import dataclass
 from http import HTTPStatus
-from itertools import chain
 from logging import NOTSET, Handler, LogRecord
 from typing import TYPE_CHECKING, override
 
 from slack_sdk.webhook.async_client import AsyncWebhookClient
 
-from utilities.asyncio import (
-    InfiniteQueueLooper,
-    QueueProcessor,
-    sleep_dur,
-    timeout_dur,
-)
+from utilities.asyncio import InfiniteQueueLooper, timeout_dur
 from utilities.datetime import MINUTE, SECOND, datetime_duration_to_float
 from utilities.functools import cache
 from utilities.math import safe_round
@@ -40,66 +34,6 @@ async def _send_adapter(url: str, text: str, /) -> None:
     await send_to_slack(url, text)  # pragma: no cover
 
 
-@dataclass(init=False, order=True, unsafe_hash=True)
-class SlackHandler(Handler, QueueProcessor[str]):
-    """Handler for sending messages to Slack."""
-
-    @override
-    def __init__(
-        self,
-        url: str,
-        /,
-        *,
-        level: int = NOTSET,
-        queue_type: type[Queue[str]] = Queue,
-        queue_max_size: int | None = None,
-        sender: Callable[[str, str], Coroutine1[None]] = _send_adapter,
-        timeout: Duration = _TIMEOUT,
-        callback_failure: Callable[[str, Exception], None] | None = None,
-        callback_success: Callable[[str], None] | None = None,
-        callback_final: Callable[[str], None] | None = None,
-        sleep: Duration = _SLEEP,
-    ) -> None:
-        QueueProcessor.__init__(  # QueueProcessor first
-            self, queue_type=queue_type, queue_max_size=queue_max_size
-        )
-        QueueProcessor.__post_init__(self)
-        Handler.__init__(self, level=level)
-        self.url = url
-        self.sender = sender
-        self.timeout = timeout
-        self.callback_failure = callback_failure
-        self.callback_success = callback_success
-        self.callback_final = callback_final
-        self.sleep = sleep
-
-    @override
-    def emit(self, record: LogRecord) -> None:
-        try:
-            self.enqueue(self.format(record))
-        except Exception:  # noqa: BLE001 # pragma: no cover
-            self.handleError(record)
-
-    @override
-    async def _process_item(self, item: str, /) -> None:
-        """Process the first item."""
-        items = list(chain([item], self._get_items_nowait()))
-        text = "\n".join(items)
-        try:
-            async with timeout_dur(duration=self.timeout):
-                await self.sender(self.url, text)
-        except Exception as error:  # noqa: BLE001
-            if self.callback_failure is not None:
-                self.callback_failure(text, error)
-        else:
-            if self.callback_success is not None:
-                self.callback_success(text)
-        finally:
-            if self.callback_final is not None:
-                self.callback_final(text)
-            await sleep_dur(duration=self.sleep)
-
-
 @dataclass(init=False, unsafe_hash=True)
 class SlackHandlerIQL(Handler, InfiniteQueueLooper[None, str]):
     """Handler for sending messages to Slack."""
@@ -176,4 +110,4 @@ def _get_client(url: str, /, *, timeout: Duration = _TIMEOUT) -> AsyncWebhookCli
     return AsyncWebhookClient(url, timeout=timeout_use)
 
 
-__all__ = ["SendToSlackError", "SlackHandler", "SlackHandlerIQL", "send_to_slack"]
+__all__ = ["SendToSlackError", "SlackHandlerIQL", "send_to_slack"]
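A hedged migration sketch, not part of the diff: logging setups that attached the removed SlackHandler now attach SlackHandlerIQL. Passing the webhook URL positionally is an assumption, copied from the deleted handler's __init__; the URL itself is a placeholder.

import logging

from utilities.slack_sdk import SlackHandlerIQL  # SlackHandler (QueueProcessor-based) was removed

logger = logging.getLogger("app")
handler = SlackHandlerIQL("https://hooks.slack.com/services/...")  # assumed signature
logger.addHandler(handler)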
utilities/sqlalchemy.py
CHANGED
@@ -57,7 +57,7 @@ from sqlalchemy.orm import (
 from sqlalchemy.orm.exc import UnmappedClassError
 from sqlalchemy.pool import NullPool, Pool
 
-from utilities.asyncio import InfiniteQueueLooper, QueueProcessor, timeout_dur
+from utilities.asyncio import InfiniteQueueLooper, timeout_dur
 from utilities.functions import (
     ensure_str,
     get_class_name,
@@ -608,48 +608,6 @@ class TablenameMixin:
 ##
 
 
-@dataclass(kw_only=True)
-class Upserter(QueueProcessor[_InsertItem]):
-    """Upsert a set of items into a database."""
-
-    engine: AsyncEngine
-    snake: bool = False
-    selected_or_all: _SelectedOrAll = "selected"
-    chunk_size_frac: float = CHUNK_SIZE_FRAC
-    assume_tables_exist: bool = False
-    timeout_create: Duration | None = None
-    error_create: type[Exception] = TimeoutError
-    timeout_insert: Duration | None = None
-    error_insert: type[Exception] = TimeoutError
-
-    async def _pre_upsert(self, items: Sequence[_InsertItem], /) -> None:
-        """Pre-upsert coroutine."""
-        _ = items
-
-    async def _post_upsert(self, items: Sequence[_InsertItem], /) -> None:
-        """Post-upsert coroutine."""
-        _ = items
-
-    @override
-    async def _process_item(self, item: _InsertItem, /) -> None:
-        """Process the first item."""
-        items = list(chain([item], self._get_items_nowait()))
-        await self._pre_upsert(items)
-        await upsert_items(
-            self.engine,
-            *items,
-            snake=self.snake,
-            selected_or_all=self.selected_or_all,
-            chunk_size_frac=self.chunk_size_frac,
-            assume_tables_exist=self.assume_tables_exist,
-            timeout_create=self.timeout_create,
-            error_create=self.error_create,
-            timeout_insert=self.timeout_insert,
-            error_insert=self.error_insert,
-        )
-        await self._post_upsert(items)
-
-
 @dataclass(kw_only=True)
 class UpserterIQL(InfiniteQueueLooper[None, _InsertItem]):
     """Upsert a set of items to a database."""
@@ -1150,7 +1108,6 @@ __all__ = [
     "InsertItemsError",
     "TablenameMixin",
     "UpsertItemsError",
-    "Upserter",
     "UpserterIQL",
     "UpserterIQLError",
     "check_engine",
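A hedged migration sketch, not part of the diff: the QueueProcessor-based Upserter is removed, leaving UpserterIQL. The engine field is an assumption carried over from the deleted Upserter dataclass.

from sqlalchemy.ext.asyncio import create_async_engine

from utilities.sqlalchemy import UpserterIQL  # Upserter (QueueProcessor-based) was removed

engine = create_async_engine("sqlite+aiosqlite:///:memory:")
# engine= is an assumed field, mirroring the deleted Upserter above
upserter = UpserterIQL(engine=engine)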