dycw-utilities 0.136.2__py3-none-any.whl → 0.136.4__py3-none-any.whl
This diff shows the changes between two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- {dycw_utilities-0.136.2.dist-info → dycw_utilities-0.136.4.dist-info}/METADATA +1 -1
- {dycw_utilities-0.136.2.dist-info → dycw_utilities-0.136.4.dist-info}/RECORD +6 -7
- utilities/__init__.py +1 -1
- utilities/asyncio.py +13 -3
- utilities/arq.py +0 -216
- {dycw_utilities-0.136.2.dist-info → dycw_utilities-0.136.4.dist-info}/WHEEL +0 -0
- {dycw_utilities-0.136.2.dist-info → dycw_utilities-0.136.4.dist-info}/licenses/LICENSE +0 -0
{dycw_utilities-0.136.2.dist-info → dycw_utilities-0.136.4.dist-info}/RECORD
CHANGED
@@ -1,8 +1,7 @@
-utilities/__init__.py,sha256=
+utilities/__init__.py,sha256=dsiNRoGm7Boxbp7neOt74yIhZx6QM31tZ2CTGJY7K20,60
 utilities/aiolimiter.py,sha256=mD0wEiqMgwpty4XTbawFpnkkmJS6R4JRsVXFUaoitSU,628
 utilities/altair.py,sha256=HeZBVUocjkrTNwwKrClppsIqgNFF-ykv05HfZSoHYno,9104
-utilities/
-utilities/asyncio.py,sha256=kr0S02DrAgpjPU3ySmhD806S5x_7wFUFeV7-zWMrinY,38249
+utilities/asyncio.py,sha256=dcGeKQzjLBXxKzZkVIk5oZsFXEcynVbRB9iNB5XEDZk,38526
 utilities/atomicwrites.py,sha256=geFjn9Pwn-tTrtoGjDDxWli9NqbYfy3gGL6ZBctiqSo,5393
 utilities/atools.py,sha256=9im2g8OCf-Iynqa8bAv8N0Ycj9QvrJmGO7yLCZEdgII,986
 utilities/cachetools.py,sha256=v1-9sXHLdOLiwmkq6NB0OUbxeKBuVVN6wmAWefWoaHI,2744
@@ -89,7 +88,7 @@ utilities/warnings.py,sha256=un1LvHv70PU-LLv8RxPVmugTzDJkkGXRMZTE2-fTQHw,1771
 utilities/whenever.py,sha256=A-yoOqBqrcVD1yDINDsTFDw7dq9-zgUGn_f8CxVUQJs,23332
 utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
 utilities/zoneinfo.py,sha256=oEH-nL3t4h9uawyZqWDtNtDAl6M-CLpLYGI_nI6DulM,1971
-dycw_utilities-0.136.
-dycw_utilities-0.136.
-dycw_utilities-0.136.
-dycw_utilities-0.136.
+dycw_utilities-0.136.4.dist-info/METADATA,sha256=2ONGjvk-6Ada8oSDMVX6w1cxOniaJPZpMI-Spv4Lhzs,1637
+dycw_utilities-0.136.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dycw_utilities-0.136.4.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+dycw_utilities-0.136.4.dist-info/RECORD,,
utilities/__init__.py
CHANGED
utilities/asyncio.py
CHANGED
@@ -220,6 +220,7 @@ class EnhancedQueue[T](Queue[T]):
 class EnhancedTaskGroup(TaskGroup):
     """Task group with enhanced features."""
 
+    _max_tasks: int | None
     _semaphore: Semaphore | None
     _timeout: TimeDelta | None
     _error: MaybeType[BaseException]
@@ -237,7 +238,11 @@ class EnhancedTaskGroup(TaskGroup):
         debug: MaybeCallableBool = False,
     ) -> None:
         super().__init__()
-        self.
+        self._max_tasks = max_tasks
+        if (max_tasks is None) or (max_tasks <= 0):
+            self._semaphore = None
+        else:
+            self._semaphore = Semaphore(max_tasks)
         self._timeout = timeout
         self._error = error
         self._debug = debug
@@ -257,7 +262,7 @@ class EnhancedTaskGroup(TaskGroup):
         tb: TracebackType | None,
     ) -> None:
         _ = await self._stack.__aexit__(et, exc, tb)
-        match
+        match self._is_debug():
             case True:
                 with suppress(Exception):
                     _ = await super().__aexit__(et, exc, tb)
@@ -293,7 +298,7 @@ class EnhancedTaskGroup(TaskGroup):
         name: str | None = None,
         context: Context | None = None,
     ) -> T | Task[T]:
-        match
+        match self._is_debug():
             case True:
                 return await coro
             case False:
@@ -301,6 +306,11 @@ class EnhancedTaskGroup(TaskGroup):
             case _ as never:
                 assert_never(never)
 
+    def _is_debug(self) -> bool:
+        return to_bool(bool_=self._debug) or (
+            (self._max_tasks is not None) and (self._max_tasks <= 0)
+        )
+
     async def _wrap_with_semaphore[T](
         self, semaphore: Semaphore, coroutine: _CoroutineLike[T], /
     ) -> T:
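The change above gates the task group's semaphore on a positive `max_tasks` and, via the new `_is_debug`, treats a non-positive `max_tasks` like `debug=True` (coroutines are awaited inline and task-group errors on exit are suppressed). Below is a minimal stdlib-only sketch of the semaphore side of that behaviour; `make_semaphore` and `run_limited` are hypothetical stand-ins for the `__init__` branch and `_wrap_with_semaphore` shown in the diff, not part of this package's API:

```python
import asyncio
from collections.abc import Coroutine
from typing import Any


def make_semaphore(max_tasks: int | None) -> asyncio.Semaphore | None:
    # Mirrors the new __init__ branch: no semaphore when max_tasks is None
    # or non-positive; otherwise a semaphore bounding concurrent tasks.
    if (max_tasks is None) or (max_tasks <= 0):
        return None
    return asyncio.Semaphore(max_tasks)


async def run_limited(sem: asyncio.Semaphore | None, coro: Coroutine[Any, Any, Any]) -> Any:
    # Analogue of _wrap_with_semaphore: hold the semaphore (if any) while
    # the wrapped coroutine runs.
    if sem is None:
        return await coro
    async with sem:
        return await coro


async def main() -> None:
    sem = make_semaphore(2)  # at most two `work` coroutines run concurrently

    async def work(i: int) -> int:
        await asyncio.sleep(0.1)
        return i

    async with asyncio.TaskGroup() as tg:  # Python >= 3.11
        for i in range(5):
            tg.create_task(run_limited(sem, work(i)))


asyncio.run(main())
```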
utilities/arq.py
DELETED
@@ -1,216 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import dataclass
-from functools import wraps
-from itertools import chain
-from typing import TYPE_CHECKING, Any, Self, cast, override
-
-from arq.constants import default_queue_name, expires_extra_ms
-from arq.cron import cron
-
-from utilities.dataclasses import replace_non_sentinel
-from utilities.sentinel import Sentinel, sentinel
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable, Sequence
-    from datetime import datetime, timedelta, timezone
-
-    from arq.connections import ArqRedis, RedisSettings
-    from arq.cron import CronJob
-    from arq.jobs import Deserializer, Job, Serializer
-    from arq.typing import (
-        OptionType,
-        SecondsTimedelta,
-        StartupShutdown,
-        WeekdayOptionType,
-        WorkerCoroutine,
-    )
-    from arq.worker import Function
-
-    from utilities.types import Coro, StrMapping
-
-
-def cron_raw(
-    coroutine: Callable[..., Coro[Any]],
-    /,
-    *,
-    name: str | None = None,
-    month: OptionType = None,
-    day: OptionType = None,
-    weekday: WeekdayOptionType = None,
-    hour: OptionType = None,
-    minute: OptionType = None,
-    second: OptionType = 0,
-    microsecond: int = 123_456,
-    run_at_startup: bool = False,
-    unique: bool = True,
-    job_id: str | None = None,
-    timeout: SecondsTimedelta | None = None,
-    keep_result: float | None = 0,
-    keep_result_forever: bool | None = False,
-    max_tries: int | None = 1,
-    args: Iterable[Any] | None = None,
-    kwargs: StrMapping | None = None,
-) -> CronJob:
-    """Create a cron job with a raw coroutine function."""
-    lifted = _lift_cron(
-        coroutine, *(() if args is None else args), **({} if kwargs is None else kwargs)
-    )
-    return cron(
-        lifted,
-        name=name,
-        month=month,
-        day=day,
-        weekday=weekday,
-        hour=hour,
-        minute=minute,
-        second=second,
-        microsecond=microsecond,
-        run_at_startup=run_at_startup,
-        unique=unique,
-        job_id=job_id,
-        timeout=timeout,
-        keep_result=keep_result,
-        keep_result_forever=keep_result_forever,
-        max_tries=max_tries,
-    )
-
-
-def _lift_cron[**P, T](
-    func: Callable[P, Coro[T]], *args: P.args, **kwargs: P.kwargs
-) -> WorkerCoroutine:
-    """Lift a coroutine function & call arg/kwargs for `cron`."""
-
-    @wraps(func)
-    async def wrapped(ctx: StrMapping, /) -> T:
-        _ = ctx
-        return await func(*args, **kwargs)
-
-    return cast("Any", wrapped)
-
-
-##
-
-
-@dataclass(kw_only=True, slots=True)
-class _JobEnqueuer:
-    """Enqueuer of jobs."""
-
-    job_id: str | None = None
-    queue_name: str | None = None
-    defer_until: datetime | None = None
-    defer_by: int | float | timedelta | None = None
-    expires: int | float | timedelta | None = None
-    job_try: int | None = None
-
-    async def __call__[**P, T](
-        self,
-        redis: ArqRedis,
-        function: Callable[P, Coro[T]],
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> Job | None:
-        return await redis.enqueue_job(  # skipif-ci-and-not-linux
-            function.__name__,
-            *args,
-            _job_id=self.job_id,
-            _queue_name=self.queue_name,
-            _defer_until=self.defer_until,
-            _defer_by=self.defer_by,
-            _expires=self.expires,
-            _job_try=self.job_try,
-            **kwargs,
-        )
-
-    def settings(
-        self,
-        *,
-        job_id: str | None | Sentinel = sentinel,
-        queue_name: str | None | Sentinel = sentinel,
-        defer_until: datetime | None | Sentinel = sentinel,
-        defer_by: float | timedelta | None | Sentinel = sentinel,
-        expires: float | timedelta | None | Sentinel = sentinel,
-        job_try: int | None | Sentinel = sentinel,
-    ) -> Self:
-        """Replace elements of the enqueuer."""
-        return replace_non_sentinel(  # skipif-ci-and-not-linux
-            self,
-            job_id=job_id,
-            queue_name=queue_name,
-            defer_until=defer_until,
-            defer_by=defer_by,
-            expires=expires,
-            job_try=job_try,
-        )
-
-
-job_enqueuer = _JobEnqueuer()
-
-
-##
-
-
-class _WorkerMeta(type):
-    @override
-    def __new__(
-        mcs: type[_WorkerMeta],
-        name: str,
-        bases: tuple[type, ...],
-        namespace: dict[str, Any],
-        /,
-    ) -> type[Worker]:
-        cls = cast("type[Worker]", super().__new__(mcs, name, bases, namespace))
-        cls.functions = tuple(chain(cls.functions, map(cls._lift, cls.functions_raw)))
-        return cls
-
-    @classmethod
-    def _lift[**P, T](cls, func: Callable[P, Coro[T]]) -> WorkerCoroutine:
-        """Lift a coroutine function to accept the required `ctx` argument."""
-
-        @wraps(func)
-        async def wrapped(ctx: StrMapping, *args: P.args, **kwargs: P.kwargs) -> T:
-            _ = ctx
-            return await func(*args, **kwargs)
-
-        return cast("Any", wrapped)
-
-
-@dataclass(kw_only=True)
-class Worker(metaclass=_WorkerMeta):
-    """Base class for all workers."""
-
-    functions: Sequence[Function | WorkerCoroutine] = ()
-    functions_raw: Sequence[Callable[..., Coro[Any]]] = ()
-    queue_name: str | None = default_queue_name
-    cron_jobs: Sequence[CronJob] | None = None
-    redis_settings: RedisSettings | None = None
-    redis_pool: ArqRedis | None = None
-    burst: bool = False
-    on_startup: StartupShutdown | None = None
-    on_shutdown: StartupShutdown | None = None
-    on_job_start: StartupShutdown | None = None
-    on_job_end: StartupShutdown | None = None
-    after_job_end: StartupShutdown | None = None
-    handle_signals: bool = True
-    job_completion_wait: int = 0
-    max_jobs: int = 10
-    job_timeout: SecondsTimedelta = 300
-    keep_result: SecondsTimedelta = 3600
-    keep_result_forever: bool = False
-    poll_delay: SecondsTimedelta = 0.5
-    queue_read_limit: int | None = None
-    max_tries: int = 5
-    health_check_interval: SecondsTimedelta = 3600
-    health_check_key: str | None = None
-    ctx: dict[Any, Any] | None = None
-    retry_jobs: bool = True
-    allow_abort_jobs: bool = False
-    max_burst_jobs: int = -1
-    job_serializer: Serializer | None = None
-    job_deserializer: Deserializer | None = None
-    expires_extra_ms: int = expires_extra_ms
-    timezone: timezone | None = None
-    log_results: bool = True
-
-
-__all__ = ["Worker", "cron", "job_enqueuer"]
File without changes
|
File without changes
|