dycw-utilities 0.117.0__py3-none-any.whl → 0.118.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dycw-utilities
- Version: 0.117.0
+ Version: 0.118.0
  Author-email: Derek Wan <d.wan@icloud.com>
  License-File: LICENSE
  Requires-Python: >=3.12
@@ -1,7 +1,7 @@
- utilities/__init__.py,sha256=P9EtLQQfrOCDM1TAOhodQ4gfhNisheGhmugAZrQJYYc,60
+ utilities/__init__.py,sha256=m9DsK8iICHcyqc8Uf9izHc2F4wKsVwGwOOvxhVTp46A,60
  utilities/altair.py,sha256=Gpja-flOo-Db0PIPJLJsgzAlXWoKUjPU1qY-DQ829ek,9156
  utilities/astor.py,sha256=xuDUkjq0-b6fhtwjhbnebzbqQZAjMSHR1IIS5uOodVg,777
- utilities/asyncio.py,sha256=HqPgdti3ZJPH7uHJkvmZ2weIVKYEpB6FKh6FBriMAPU,24287
+ utilities/asyncio.py,sha256=HGX79AKzpQbbDBW3paxAXrhWYeudcJjiO1ETU40d_-8,18463
  utilities/atomicwrites.py,sha256=geFjn9Pwn-tTrtoGjDDxWli9NqbYfy3gGL6ZBctiqSo,5393
  utilities/atools.py,sha256=IYMuFSFGSKyuQmqD6v5IUtDlz8PPw0Sr87Cub_gRU3M,1168
  utilities/cachetools.py,sha256=C1zqOg7BYz0IfQFK8e3qaDDgEZxDpo47F15RTfJM37Q,2910
@@ -16,7 +16,7 @@ utilities/datetime.py,sha256=VOwjPibw63Myv-CRYhT2eEHpz277GqUiEDEaI7p-nQw,38985
  utilities/enum.py,sha256=HoRwVCWzsnH0vpO9ZEcAAIZLMv0Sn2vJxxA4sYMQgDs,5793
  utilities/errors.py,sha256=gxsaa7eq7jbYl41Of40-ivjXqJB5gt4QAcJ0smZZMJE,829
  utilities/eventkit.py,sha256=6M5Xu1SzN-juk9PqBHwy5dS-ta7T0qA6SMpDsakOJ0E,13039
- utilities/fastapi.py,sha256=y-35at3005jzlNx2wJoiSvB1Ch5bMo30wgU_so3IDdI,2467
+ utilities/fastapi.py,sha256=eiisloI6kQVCkPfDpBzlLrDZDi8yJ0VmrSPlJ2k84Mo,2334
  utilities/fpdf2.py,sha256=y1NGXR5chWqLXWpewGV3hlRGMr_5yV1lVRkPBhPEgJI,1843
  utilities/functions.py,sha256=jgt592voaHNtX56qX0SRvFveVCRmSIxCZmqvpLZCnY8,27305
  utilities/functools.py,sha256=WrpHt7NLNWSUn9A1Q_ZIWlNaYZOEI4IFKyBG9HO3BC4,1643
@@ -59,15 +59,15 @@ utilities/pytest_regressions.py,sha256=-SVT9647Dg6-JcdsiaDKXe3NdOmmrvGevLKWwGjxq
  utilities/python_dotenv.py,sha256=iWcnpXbH7S6RoXHiLlGgyuH6udCupAcPd_gQ0eAenQ0,3190
  utilities/random.py,sha256=lYdjgxB7GCfU_fwFVl5U-BIM_HV3q6_urL9byjrwDM8,4157
  utilities/re.py,sha256=5J4d8VwIPFVrX2Eb8zfoxImDv7IwiN_U7mJ07wR2Wvs,3958
- utilities/redis.py,sha256=P766qKT2SkDeKa9PpPEZIPCnAc2QDIi35ow2EEHGd20,27225
+ utilities/redis.py,sha256=OHw3J2dBA5QssDluKXAG1zIAK2mJJTd6uBuf_1YQuAE,26646
  utilities/reprlib.py,sha256=Re9bk3n-kC__9DxQmRlevqFA86pE6TtVfWjUgpbVOv0,1849
  utilities/rich.py,sha256=t50MwwVBsoOLxzmeVFSVpjno4OW6Ufum32skXbV8-Bs,1911
  utilities/scipy.py,sha256=X6ROnHwiUhAmPhM0jkfEh0-Fd9iRvwiqtCQMOLmOQF8,945
  utilities/sentinel.py,sha256=3jIwgpMekWgDAxPDA_hXMP2St43cPhciKN3LWiZ7kv0,1248
  utilities/shelve.py,sha256=HZsMwK4tcIfg3sh0gApx4-yjQnrY4o3V3ZRimvRhoW0,738
- utilities/slack_sdk.py,sha256=wPqn9F5AMXgmkp3zgIrBMllLt2SDCCnBNNyi-ag3yzw,5555
+ utilities/slack_sdk.py,sha256=NLHmWYK6wc5bz4CGImugXceaToasNBLSqA5sd5ld2r4,3307
  utilities/socket.py,sha256=K77vfREvzoVTrpYKo6MZakol0EYu2q1sWJnnZqL0So0,118
- utilities/sqlalchemy.py,sha256=585hWuuXVTKTnyn0Pfd9JI6jp-hmKW6pLKGYMjXjytM,36959
+ utilities/sqlalchemy.py,sha256=09stMwvmI68zlk-DSy9GDk5_YxcMddLh87RPC8Bs4yY,35469
  utilities/sqlalchemy_polars.py,sha256=wjJpoUo-yO9E2ujpG_06vV5r2OdvBiQ4yvV6wKCa2Tk,15605
  utilities/statsmodels.py,sha256=koyiBHvpMcSiBfh99wFUfSggLNx7cuAw3rwyfAhoKpQ,3410
  utilities/streamlit.py,sha256=U9PJBaKP1IdSykKhPZhIzSPTZsmLsnwbEPZWzNhJPKk,2955
@@ -88,7 +88,7 @@ utilities/warnings.py,sha256=un1LvHv70PU-LLv8RxPVmugTzDJkkGXRMZTE2-fTQHw,1771
  utilities/whenever.py,sha256=fC0ZtnO0AyFHsxP4SWj0POI1bf4BIL3Hh4rR51BHfaw,17803
  utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
  utilities/zoneinfo.py,sha256=-Xm57PMMwDTYpxJdkiJG13wnbwK--I7XItBh5WVhD-o,1874
- dycw_utilities-0.117.0.dist-info/METADATA,sha256=OB8XvTMe2rLu98khXHV7Y43VtZl_4iotbtejKesZsZg,12943
- dycw_utilities-0.117.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- dycw_utilities-0.117.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
- dycw_utilities-0.117.0.dist-info/RECORD,,
+ dycw_utilities-0.118.0.dist-info/METADATA,sha256=5nkN-STFMRlbctAGspdNN6t3wanBvmbU4JY8eBG3KVg,12943
+ dycw_utilities-0.118.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ dycw_utilities-0.118.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+ dycw_utilities-0.118.0.dist-info/RECORD,,
utilities/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from __future__ import annotations
 
- __version__ = "0.117.0"
+ __version__ = "0.118.0"
utilities/asyncio.py CHANGED
@@ -1,8 +1,8 @@
  from __future__ import annotations
 
+ import datetime as dt
  from abc import ABC, abstractmethod
  from asyncio import (
- CancelledError,
  Event,
  PriorityQueue,
  Queue,
@@ -12,17 +12,11 @@ from asyncio import (
  Task,
  TaskGroup,
  create_subprocess_shell,
- create_task,
  sleep,
  timeout,
  )
  from collections.abc import Callable, Hashable, Iterable, Iterator, Mapping
- from contextlib import (
- AsyncExitStack,
- _AsyncGeneratorContextManager,
- asynccontextmanager,
- suppress,
- )
+ from contextlib import _AsyncGeneratorContextManager, asynccontextmanager
  from dataclasses import dataclass, field
  from io import StringIO
  from logging import getLogger
@@ -32,8 +26,8 @@ from typing import (
  TYPE_CHECKING,
  Any,
  Generic,
+ Literal,
  NoReturn,
- Self,
  TextIO,
  TypeVar,
  assert_never,
@@ -42,14 +36,14 @@ from typing import (
  )
 
  from utilities.datetime import (
- MILLISECOND,
  MINUTE,
  SECOND,
  datetime_duration_to_float,
+ datetime_duration_to_timedelta,
  get_now,
  round_datetime,
  )
- from utilities.errors import ImpossibleCaseError, repr_error
+ from utilities.errors import repr_error
  from utilities.functions import ensure_int, ensure_not_none, get_class_name
  from utilities.reprlib import get_repr
  from utilities.sentinel import Sentinel, sentinel
@@ -62,12 +56,10 @@ from utilities.types import (
  )
 
  if TYPE_CHECKING:
- import datetime as dt
  from asyncio import _CoroutineLike
  from asyncio.subprocess import Process
  from collections.abc import AsyncIterator, Sequence
  from contextvars import Context
- from types import TracebackType
 
  from utilities.types import Duration
 
@@ -78,120 +70,6 @@ _T = TypeVar("_T")
  ##
 
 
- @dataclass(kw_only=True)
- class AsyncService(ABC):
- """A long-running, asynchronous service."""
-
- duration: Duration | None = None
- _await_upon_aenter: bool = field(default=True, init=False, repr=False)
- _event: Event = field(default_factory=Event, init=False, repr=False)
- _stack: AsyncExitStack = field(
- default_factory=AsyncExitStack, init=False, repr=False
- )
- _state: bool = field(default=False, init=False, repr=False)
- _task: Task[None] | None = field(default=None, init=False, repr=False)
- _depth: int = field(default=0, init=False, repr=False)
-
- async def __aenter__(self) -> Self:
- """Context manager entry."""
- if (self._task is None) and (self._depth == 0):
- _ = await self._stack.__aenter__()
- self._task = create_task(self._start_runner())
- if self._await_upon_aenter:
- with suppress(CancelledError):
- await self._task
- elif (self._task is not None) and (self._depth >= 1):
- ...
- else:
- raise ImpossibleCaseError( # pragma: no cover
- case=[f"{self._task=}", f"{self._depth=}"]
- )
- self._depth += 1
- return self
-
- async def __aexit__(
- self,
- exc_type: type[BaseException] | None = None,
- exc_value: BaseException | None = None,
- traceback: TracebackType | None = None,
- ) -> None:
- """Context manager exit."""
- _ = (exc_type, exc_value, traceback)
- if (self._task is None) or (self._depth == 0):
- raise ImpossibleCaseError( # pragma: no cover
- case=[f"{self._task=}", f"{self._depth=}"]
- )
- self._state = False
- self._depth -= 1
- if self._depth == 0:
- _ = await self._stack.__aexit__(exc_type, exc_value, traceback)
- await self.stop()
- with suppress(CancelledError):
- await self._task
- self._task = None
-
- @abstractmethod
- async def _start(self) -> None:
- """Start the service."""
-
- async def _start_runner(self) -> None:
- """Coroutine to start the service."""
- if self.duration is None:
- _ = await self._start()
- _ = await self._event.wait()
- else:
- try:
- async with timeout_dur(duration=self.duration):
- _ = await self._start()
- except TimeoutError:
- await self.stop()
-
- async def stop(self) -> None:
- """Stop the service."""
- if self._task is None:
- raise ImpossibleCaseError(case=[f"{self._task=}"]) # pragma: no cover
- with suppress(CancelledError):
- _ = self._task.cancel()
-
-
- ##
-
-
- @dataclass(kw_only=True)
- class AsyncLoopingService(AsyncService):
- """A long-running, asynchronous service which loops a core function."""
-
- sleep: Duration = MILLISECOND
- _await_upon_aenter: bool = field(default=True, init=False, repr=False)
-
- @abstractmethod
- async def _run(self) -> None:
- """Run the core function once."""
- raise NotImplementedError # pragma: no cover
-
- async def _run_failure(self, error: Exception, /) -> None:
- """Process the failure."""
- raise error
-
- @override
- async def _start(self) -> None:
- """Start the service, assuming no task is present."""
- while True:
- try:
- await self._run()
- except CancelledError:
- await self.stop()
- break
- except Exception as error: # noqa: BLE001
- await self._run_failure(error)
- await sleep_dur(duration=self.sleep)
- else:
- await sleep_dur(duration=self.sleep)
-
-
- ##
-
-
  class EnhancedTaskGroup(TaskGroup):
  """Task group with enhanced features."""
 
@@ -243,106 +121,15 @@ class EnhancedTaskGroup(TaskGroup):
  ##
 
 
- @dataclass(kw_only=True)
- class QueueProcessor(AsyncService, Generic[_T]):
- """Process a set of items in a queue."""
-
- queue_type: type[Queue[_T]] = field(default=Queue, repr=False)
- queue_max_size: int | None = field(default=None, repr=False)
- sleep: Duration = MILLISECOND
- _await_upon_aenter: bool = field(default=False, init=False, repr=False)
- _queue: Queue[_T] = field(init=False, repr=False)
-
- def __post_init__(self) -> None:
- self._queue = self.queue_type(
- maxsize=0 if self.queue_max_size is None else self.queue_max_size
- )
-
- def __len__(self) -> int:
- return self._queue.qsize()
-
- def empty(self) -> bool:
- """Check if the queue is empty."""
- return self._queue.empty()
-
- def enqueue(self, *items: _T) -> None:
- """Enqueue a set items."""
- for item in items:
- self._queue.put_nowait(item)
-
- async def run_until_empty(self) -> None:
- """Run the processor until the queue is empty."""
- while not self.empty():
- await self._run()
- await sleep_dur(duration=self.sleep)
-
- def _get_items_nowait(self, *, max_size: int | None = None) -> Sequence[_T]:
- """Get items from the queue; no waiting."""
- return get_items_nowait(self._queue, max_size=max_size)
-
- @abstractmethod
- async def _process_item(self, item: _T, /) -> None:
- """Process the first item."""
- raise NotImplementedError(item) # pragma: no cover
-
- async def _process_item_failure(self, item: _T, error: Exception, /) -> None:
- """Process the failure."""
- _ = item
- raise error
-
- async def _run(self) -> None:
- """Run the processer."""
- try:
- (item,) = self._get_items_nowait(max_size=1)
- except ValueError:
- raise QueueEmpty from None
- try:
- await self._process_item(item)
- except Exception as error: # noqa: BLE001
- await self._process_item_failure(item, error)
-
- @override
- async def _start(self) -> None:
- """Start the processor."""
- while True:
- try:
- await self._run()
- except QueueEmpty:
- await sleep_dur(duration=self.sleep)
- except CancelledError:
- await self.stop()
- break
- else:
- await sleep_dur(duration=self.sleep)
-
- @override
- async def stop(self) -> None:
- """Stop the processor."""
- await self.run_until_empty()
- await super().stop()
-
-
- @dataclass(kw_only=True)
- class ExceptionProcessor(QueueProcessor[Exception | type[Exception]]):
- """Raise an exception in a queue."""
-
- queue_max_size: int | None = field(default=1, repr=False)
-
- @override
- async def _process_item(self, item: Exception | type[Exception], /) -> None:
- """Run the processor on the first item."""
- raise item
-
-
- ##
+ type _DurationOrEvery = Duration | tuple[Literal["every"], Duration]
 
 
  @dataclass(kw_only=True, unsafe_hash=True)
  class InfiniteLooper(ABC, Generic[THashable]):
  """An infinite loop which can throw exceptions by setting events."""
 
- sleep_core: Duration = SECOND
- sleep_restart: Duration = MINUTE
+ sleep_core: _DurationOrEvery = SECOND
+ sleep_restart: _DurationOrEvery = MINUTE
  logger: str | None = None
  _events: Mapping[THashable, Event] = field(
  default_factory=dict, init=False, repr=False, hash=False
@@ -369,7 +156,7 @@ class InfiniteLooper(ABC, Generic[THashable]):
  await self._initialize()
  except Exception as error: # noqa: BLE001
  self._error_upon_initialize(error)
- await sleep_dur(duration=self.sleep_restart)
+ await self._run_sleep(self.sleep_restart)
  else:
  while True:
  try:
@@ -380,14 +167,14 @@ class InfiniteLooper(ABC, Generic[THashable]):
  )
  except StopIteration:
  await self._core()
- await sleep_dur(duration=self.sleep_core)
+ await self._run_sleep(self.sleep_core)
  else:
  self._raise_error(event)
  except InfiniteLooperError:
  raise
  except Exception as error: # noqa: BLE001
  self._error_upon_core(error)
- await sleep_dur(duration=self.sleep_restart)
+ await self._run_sleep(self.sleep_restart)
 
  async def _run_looper_with_coroutines(
  self, *coroutines: Callable[[], Coroutine1[None]]
@@ -401,7 +188,7 @@ class InfiniteLooper(ABC, Generic[THashable]):
  _ = [tg.create_task(c()) for c in coroutines]
  except ExceptionGroup as error:
  self._error_group_upon_coroutines(error)
- await sleep_dur(duration=self.sleep_restart)
+ await self._run_sleep(self.sleep_restart)
 
  async def _initialize(self) -> None:
  """Initialize the loop."""
@@ -413,20 +200,20 @@ class InfiniteLooper(ABC, Generic[THashable]):
  """Handle any errors upon initializing the looper."""
  if self.logger is not None:
  getLogger(name=self.logger).error(
- "%r encountered %r whilst initializing; sleeping for %s...",
+ "%r encountered %r whilst initializing; sleeping %s...",
  get_class_name(self),
  repr_error(error),
- self.sleep_restart,
+ self._sleep_restart_desc,
  )
 
  def _error_upon_core(self, error: Exception, /) -> None:
  """Handle any errors upon running the core function."""
  if self.logger is not None:
  getLogger(name=self.logger).error(
- "%r encountered %r; sleeping for %s...",
+ "%r encountered %r; sleeping %s...",
  get_class_name(self),
  repr_error(error),
- self.sleep_restart,
+ self._sleep_restart_desc,
  )
 
  def _error_group_upon_coroutines(self, group: ExceptionGroup, /) -> None:
@@ -439,7 +226,7 @@ class InfiniteLooper(ABC, Generic[THashable]):
  f"- Error #{i}/{n}: {repr_error(e)}"
  for i, e in enumerate(errors, start=1)
  )
- msgs.append(f"Sleeping for {self.sleep_restart}...")
+ msgs.append(f"Sleeping {self._sleep_restart_desc}...")
  getLogger(name=self.logger).error("\n".join(msgs))
 
  def _raise_error(self, event: THashable, /) -> NoReturn:
@@ -454,6 +241,29 @@ class InfiniteLooper(ABC, Generic[THashable]):
  event: Event() for event, _ in self._yield_events_and_exceptions()
  }
 
+ async def _run_sleep(self, sleep: _DurationOrEvery, /) -> None:
+ """Sleep until the next part of the loop."""
+ match sleep:
+ case int() | float() | dt.timedelta() as duration:
+ await sleep_dur(duration=duration)
+ case "every", (int() | float() | dt.timedelta()) as duration:
+ await sleep_until_rounded(duration)
+ case _ as never:
+ assert_never(never)
+
+ @property
+ def _sleep_restart_desc(self) -> str:
+ """Get a description of the sleep until restart."""
+ match self.sleep_restart:
+ case int() | float() | dt.timedelta() as duration:
+ timedelta = datetime_duration_to_timedelta(duration)
+ return f"for {timedelta}"
+ case "every", (int() | float() | dt.timedelta()) as duration:
+ timedelta = datetime_duration_to_timedelta(duration)
+ return f"until next {timedelta}"
+ case _ as never:
+ assert_never(never)
+
  def _set_event(self, event: THashable, /) -> None:
  """Set the given event."""
  try:
@@ -779,15 +589,11 @@ async def timeout_dur(
 
 
  __all__ = [
- "AsyncLoopingService",
- "AsyncService",
  "EnhancedTaskGroup",
- "ExceptionProcessor",
  "InfiniteLooper",
  "InfiniteLooperError",
  "InfiniteQueueLooper",
  "InfiniteQueueLooperError",
- "QueueProcessor",
  "StreamCommandOutput",
  "UniquePriorityQueue",
  "UniqueQueue",
utilities/fastapi.py CHANGED
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, Literal, override
  from fastapi import FastAPI
  from uvicorn import Config, Server
 
- from utilities.asyncio import AsyncService
+ from utilities.asyncio import InfiniteLooper
  from utilities.datetime import SECOND, datetime_duration_to_float
 
  if TYPE_CHECKING:
@@ -36,7 +36,7 @@ class _PingerReceiverApp(FastAPI):
 
 
  @dataclass(kw_only=True)
- class PingReceiver(AsyncService):
+ class PingReceiver(InfiniteLooper):
  """A ping receiver."""
 
  host: InitVar[str] = _LOCALHOST
@@ -67,13 +67,8 @@ class PingReceiver(AsyncService):
  return response.text if response.status_code == 200 else False # skipif-ci
 
  @override
- async def _start(self) -> None:
+ async def _initialize(self) -> None:
  await self._server.serve() # skipif-ci
 
- @override
- async def stop(self) -> None:
- await self._server.shutdown() # skipif-ci
- await super().stop() # skipif-ci
-
 
  __all__ = ["PingReceiver"]
utilities/redis.py CHANGED
@@ -22,7 +22,7 @@ from redis.asyncio import Redis
  from redis.asyncio.client import PubSub
  from redis.typing import EncodableT
 
- from utilities.asyncio import InfiniteQueueLooper, QueueProcessor, timeout_dur
+ from utilities.asyncio import InfiniteQueueLooper, timeout_dur
  from utilities.datetime import (
  MILLISECOND,
  SECOND,
@@ -588,22 +588,6 @@ async def publish(
  ##
 
 
- @dataclass(kw_only=True)
- class Publisher(QueueProcessor[tuple[str, EncodableT]]):
- """Publish a set of messages to Redis."""
-
- redis: Redis
- serializer: Callable[[Any], EncodableT] | None = None
- timeout: Duration = _PUBLISH_TIMEOUT
-
- @override
- async def _process_item(self, item: tuple[str, EncodableT], /) -> None:
- channel, data = item # skipif-ci-and-not-linux
- _ = await publish( # skipif-ci-and-not-linux
- self.redis, channel, data, serializer=self.serializer, timeout=self.timeout
- )
-
-
  @dataclass(kw_only=True)
  class PublisherIQL(InfiniteQueueLooper[None, tuple[str, EncodableT]]):
  """Publish a set of messages to Redis."""
@@ -828,7 +812,6 @@ _ = _TestRedis
 
 
  __all__ = [
- "Publisher",
  "PublisherIQL",
  "PublisherIQLError",
  "RedisHashMapKey",
utilities/slack_sdk.py CHANGED
@@ -3,18 +3,12 @@ from __future__ import annotations
  from asyncio import Queue
  from dataclasses import dataclass
  from http import HTTPStatus
- from itertools import chain
  from logging import NOTSET, Handler, LogRecord
  from typing import TYPE_CHECKING, override
 
  from slack_sdk.webhook.async_client import AsyncWebhookClient
 
- from utilities.asyncio import (
- InfiniteQueueLooper,
- QueueProcessor,
- sleep_dur,
- timeout_dur,
- )
+ from utilities.asyncio import InfiniteQueueLooper, timeout_dur
  from utilities.datetime import MINUTE, SECOND, datetime_duration_to_float
  from utilities.functools import cache
  from utilities.math import safe_round
@@ -40,66 +34,6 @@ async def _send_adapter(url: str, text: str, /) -> None:
  await send_to_slack(url, text) # pragma: no cover
 
 
- @dataclass(init=False, order=True, unsafe_hash=True)
- class SlackHandler(Handler, QueueProcessor[str]):
- """Handler for sending messages to Slack."""
-
- @override
- def __init__(
- self,
- url: str,
- /,
- *,
- level: int = NOTSET,
- queue_type: type[Queue[str]] = Queue,
- queue_max_size: int | None = None,
- sender: Callable[[str, str], Coroutine1[None]] = _send_adapter,
- timeout: Duration = _TIMEOUT,
- callback_failure: Callable[[str, Exception], None] | None = None,
- callback_success: Callable[[str], None] | None = None,
- callback_final: Callable[[str], None] | None = None,
- sleep: Duration = _SLEEP,
- ) -> None:
- QueueProcessor.__init__( # QueueProcessor first
- self, queue_type=queue_type, queue_max_size=queue_max_size
- )
- QueueProcessor.__post_init__(self)
- Handler.__init__(self, level=level)
- self.url = url
- self.sender = sender
- self.timeout = timeout
- self.callback_failure = callback_failure
- self.callback_success = callback_success
- self.callback_final = callback_final
- self.sleep = sleep
-
- @override
- def emit(self, record: LogRecord) -> None:
- try:
- self.enqueue(self.format(record))
- except Exception: # noqa: BLE001 # pragma: no cover
- self.handleError(record)
-
- @override
- async def _process_item(self, item: str, /) -> None:
- """Process the first item."""
- items = list(chain([item], self._get_items_nowait()))
- text = "\n".join(items)
- try:
- async with timeout_dur(duration=self.timeout):
- await self.sender(self.url, text)
- except Exception as error: # noqa: BLE001
- if self.callback_failure is not None:
- self.callback_failure(text, error)
- else:
- if self.callback_success is not None:
- self.callback_success(text)
- finally:
- if self.callback_final is not None:
- self.callback_final(text)
- await sleep_dur(duration=self.sleep)
-
-
  @dataclass(init=False, unsafe_hash=True)
  class SlackHandlerIQL(Handler, InfiniteQueueLooper[None, str]):
  """Handler for sending messages to Slack."""
@@ -176,4 +110,4 @@ def _get_client(url: str, /, *, timeout: Duration = _TIMEOUT) -> AsyncWebhookCli
  return AsyncWebhookClient(url, timeout=timeout_use)
 
 
- __all__ = ["SendToSlackError", "SlackHandler", "SlackHandlerIQL", "send_to_slack"]
+ __all__ = ["SendToSlackError", "SlackHandlerIQL", "send_to_slack"]
utilities/sqlalchemy.py CHANGED
@@ -57,7 +57,7 @@ from sqlalchemy.orm import (
  from sqlalchemy.orm.exc import UnmappedClassError
  from sqlalchemy.pool import NullPool, Pool
 
- from utilities.asyncio import InfiniteQueueLooper, QueueProcessor, timeout_dur
+ from utilities.asyncio import InfiniteQueueLooper, timeout_dur
  from utilities.functions import (
  ensure_str,
  get_class_name,
@@ -608,48 +608,6 @@ class TablenameMixin:
  ##
 
 
- @dataclass(kw_only=True)
- class Upserter(QueueProcessor[_InsertItem]):
- """Upsert a set of items into a database."""
-
- engine: AsyncEngine
- snake: bool = False
- selected_or_all: _SelectedOrAll = "selected"
- chunk_size_frac: float = CHUNK_SIZE_FRAC
- assume_tables_exist: bool = False
- timeout_create: Duration | None = None
- error_create: type[Exception] = TimeoutError
- timeout_insert: Duration | None = None
- error_insert: type[Exception] = TimeoutError
-
- async def _pre_upsert(self, items: Sequence[_InsertItem], /) -> None:
- """Pre-upsert coroutine."""
- _ = items
-
- async def _post_upsert(self, items: Sequence[_InsertItem], /) -> None:
- """Post-upsert coroutine."""
- _ = items
-
- @override
- async def _process_item(self, item: _InsertItem, /) -> None:
- """Process the first item."""
- items = list(chain([item], self._get_items_nowait()))
- await self._pre_upsert(items)
- await upsert_items(
- self.engine,
- *items,
- snake=self.snake,
- selected_or_all=self.selected_or_all,
- chunk_size_frac=self.chunk_size_frac,
- assume_tables_exist=self.assume_tables_exist,
- timeout_create=self.timeout_create,
- error_create=self.error_create,
- timeout_insert=self.timeout_insert,
- error_insert=self.error_insert,
- )
- await self._post_upsert(items)
-
-
  @dataclass(kw_only=True)
  class UpserterIQL(InfiniteQueueLooper[None, _InsertItem]):
  """Upsert a set of items to a database."""
@@ -1150,7 +1108,6 @@ __all__ = [
  "InsertItemsError",
  "TablenameMixin",
  "UpsertItemsError",
- "Upserter",
  "UpserterIQL",
  "UpserterIQLError",
  "check_engine",