dycw-utilities 0.129.10__py3-none-any.whl → 0.175.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. dycw_utilities-0.175.17.dist-info/METADATA +34 -0
  2. dycw_utilities-0.175.17.dist-info/RECORD +103 -0
  3. dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.175.17.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +14 -14
  7. utilities/asyncio.py +350 -819
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +77 -22
  10. utilities/cachetools.py +24 -29
  11. utilities/click.py +393 -237
  12. utilities/concurrent.py +8 -11
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +83 -118
  17. utilities/docker.py +293 -0
  18. utilities/enum.py +26 -23
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +29 -65
  21. utilities/fpdf2.py +3 -3
  22. utilities/functions.py +169 -416
  23. utilities/functools.py +18 -19
  24. utilities/git.py +9 -30
  25. utilities/grp.py +28 -0
  26. utilities/gzip.py +31 -0
  27. utilities/http.py +3 -2
  28. utilities/hypothesis.py +738 -589
  29. utilities/importlib.py +17 -1
  30. utilities/inflect.py +25 -0
  31. utilities/iterables.py +194 -262
  32. utilities/jinja2.py +148 -0
  33. utilities/json.py +70 -0
  34. utilities/libcst.py +38 -17
  35. utilities/lightweight_charts.py +5 -9
  36. utilities/logging.py +345 -543
  37. utilities/math.py +18 -13
  38. utilities/memory_profiler.py +11 -15
  39. utilities/more_itertools.py +200 -131
  40. utilities/operator.py +33 -29
  41. utilities/optuna.py +6 -6
  42. utilities/orjson.py +272 -137
  43. utilities/os.py +61 -4
  44. utilities/parse.py +59 -61
  45. utilities/pathlib.py +281 -40
  46. utilities/permissions.py +298 -0
  47. utilities/pickle.py +2 -2
  48. utilities/platform.py +24 -5
  49. utilities/polars.py +1214 -430
  50. utilities/polars_ols.py +1 -1
  51. utilities/postgres.py +408 -0
  52. utilities/pottery.py +113 -26
  53. utilities/pqdm.py +10 -11
  54. utilities/psutil.py +6 -57
  55. utilities/pwd.py +28 -0
  56. utilities/pydantic.py +4 -54
  57. utilities/pydantic_settings.py +240 -0
  58. utilities/pydantic_settings_sops.py +76 -0
  59. utilities/pyinstrument.py +8 -10
  60. utilities/pytest.py +227 -121
  61. utilities/pytest_plugins/__init__.py +1 -0
  62. utilities/pytest_plugins/pytest_randomly.py +23 -0
  63. utilities/pytest_plugins/pytest_regressions.py +56 -0
  64. utilities/pytest_regressions.py +26 -46
  65. utilities/random.py +13 -9
  66. utilities/re.py +58 -28
  67. utilities/redis.py +401 -550
  68. utilities/scipy.py +1 -1
  69. utilities/sentinel.py +10 -0
  70. utilities/shelve.py +4 -1
  71. utilities/shutil.py +25 -0
  72. utilities/slack_sdk.py +36 -106
  73. utilities/sqlalchemy.py +502 -473
  74. utilities/sqlalchemy_polars.py +38 -94
  75. utilities/string.py +2 -3
  76. utilities/subprocess.py +1572 -0
  77. utilities/tempfile.py +86 -4
  78. utilities/testbook.py +50 -0
  79. utilities/text.py +165 -42
  80. utilities/timer.py +37 -65
  81. utilities/traceback.py +158 -929
  82. utilities/types.py +146 -116
  83. utilities/typing.py +531 -71
  84. utilities/tzdata.py +1 -53
  85. utilities/tzlocal.py +6 -23
  86. utilities/uuid.py +43 -5
  87. utilities/version.py +27 -26
  88. utilities/whenever.py +1776 -386
  89. utilities/zoneinfo.py +84 -22
  90. dycw_utilities-0.129.10.dist-info/METADATA +0 -241
  91. dycw_utilities-0.129.10.dist-info/RECORD +0 -96
  92. dycw_utilities-0.129.10.dist-info/WHEEL +0 -4
  93. dycw_utilities-0.129.10.dist-info/licenses/LICENSE +0 -21
  94. utilities/datetime.py +0 -1409
  95. utilities/eventkit.py +0 -402
  96. utilities/loguru.py +0 -144
  97. utilities/luigi.py +0 -228
  98. utilities/period.py +0 -324
  99. utilities/pyrsistent.py +0 -89
  100. utilities/python_dotenv.py +0 -105
  101. utilities/streamlit.py +0 -105
  102. utilities/sys.py +0 -87
  103. utilities/tenacity.py +0 -145
utilities/polars_ols.py CHANGED
@@ -6,8 +6,8 @@ from polars import Expr, Series, struct
6
6
  from polars_ols import RollingKwargs, compute_rolling_least_squares
7
7
 
8
8
  from utilities.errors import ImpossibleCaseError
9
- from utilities.functions import is_sequence_of
10
9
  from utilities.polars import concat_series, ensure_expr_or_series
10
+ from utilities.typing import is_sequence_of
11
11
 
12
12
  if TYPE_CHECKING:
13
13
  from polars._typing import IntoExprColumn
utilities/postgres.py ADDED
@@ -0,0 +1,408 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from pathlib import Path
5
+ from shutil import rmtree
6
+ from typing import TYPE_CHECKING, Literal, assert_never, override
7
+
8
+ from sqlalchemy import Table
9
+ from sqlalchemy.orm import DeclarativeBase
10
+
11
+ from utilities.asyncio import stream_command
12
+ from utilities.docker import docker_exec_cmd
13
+ from utilities.iterables import always_iterable
14
+ from utilities.logging import to_logger
15
+ from utilities.os import temp_environ
16
+ from utilities.pathlib import ensure_suffix
17
+ from utilities.sqlalchemy import extract_url, get_table_name
18
+ from utilities.timer import Timer
19
+ from utilities.types import PathLike
20
+
21
+ if TYPE_CHECKING:
22
+ from sqlalchemy import URL
23
+
24
+ from utilities.sqlalchemy import TableOrORMInstOrClass
25
+ from utilities.types import (
26
+ LoggerLike,
27
+ MaybeCollection,
28
+ MaybeCollectionStr,
29
+ PathLike,
30
+ )
31
+
32
+
33
+ type _PGDumpFormat = Literal["plain", "custom", "directory", "tar"]
34
+
35
+
36
+ async def pg_dump(
37
+ url: URL,
38
+ path: PathLike,
39
+ /,
40
+ *,
41
+ docker_container: str | None = None,
42
+ format_: _PGDumpFormat = "plain",
43
+ jobs: int | None = None,
44
+ data_only: bool = False,
45
+ clean: bool = False,
46
+ create: bool = False,
47
+ extension: MaybeCollectionStr | None = None,
48
+ extension_exc: MaybeCollectionStr | None = None,
49
+ schema: MaybeCollectionStr | None = None,
50
+ schema_exc: MaybeCollectionStr | None = None,
51
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
52
+ table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
53
+ inserts: bool = False,
54
+ on_conflict_do_nothing: bool = False,
55
+ role: str | None = None,
56
+ dry_run: bool = False,
57
+ logger: LoggerLike | None = None,
58
+ ) -> bool:
59
+ """Run `pg_dump`."""
60
+ path = _path_pg_dump(path, format_=format_)
61
+ path.parent.mkdir(parents=True, exist_ok=True)
62
+ cmd = _build_pg_dump(
63
+ url,
64
+ path,
65
+ docker_container=docker_container,
66
+ format_=format_,
67
+ jobs=jobs,
68
+ data_only=data_only,
69
+ clean=clean,
70
+ create=create,
71
+ extension=extension,
72
+ extension_exc=extension_exc,
73
+ schema=schema,
74
+ schema_exc=schema_exc,
75
+ table=table,
76
+ table_exc=table_exc,
77
+ inserts=inserts,
78
+ on_conflict_do_nothing=on_conflict_do_nothing,
79
+ role=role,
80
+ )
81
+ if dry_run:
82
+ if logger is not None:
83
+ to_logger(logger).info("Would run:\n\t%r", str(cmd))
84
+ return True
85
+ with temp_environ(PGPASSWORD=url.password), Timer() as timer: # pragma: no cover
86
+ try:
87
+ output = await stream_command(cmd)
88
+ except KeyboardInterrupt:
89
+ if logger is not None:
90
+ to_logger(logger).info(
91
+ "Cancelled backup to %r after %s", str(path), timer
92
+ )
93
+ rmtree(path, ignore_errors=True)
94
+ return False
95
+ if output.return_code != 0:
96
+ if logger is not None:
97
+ to_logger(logger).exception(
98
+ "Backup to %r failed after %s\nstderr:\n%s",
99
+ str(path),
100
+ timer,
101
+ output.stderr,
102
+ )
103
+ rmtree(path, ignore_errors=True)
104
+ return False
105
+ if logger is not None: # pragma: no cover
106
+ to_logger(logger).info("Backup to %r finished after %s", str(path), timer)
107
+ return True # pragma: no cover
108
+
109
+
110
+ def _build_pg_dump(
111
+ url: URL,
112
+ path: PathLike,
113
+ /,
114
+ *,
115
+ docker_container: str | None = None,
116
+ format_: _PGDumpFormat = "plain",
117
+ jobs: int | None = None,
118
+ data_only: bool = False,
119
+ clean: bool = False,
120
+ create: bool = False,
121
+ extension: MaybeCollectionStr | None = None,
122
+ extension_exc: MaybeCollectionStr | None = None,
123
+ schema: MaybeCollectionStr | None = None,
124
+ schema_exc: MaybeCollectionStr | None = None,
125
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
126
+ table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
127
+ inserts: bool = False,
128
+ on_conflict_do_nothing: bool = False,
129
+ role: str | None = None,
130
+ ) -> str:
131
+ extracted = extract_url(url)
132
+ path = _path_pg_dump(path, format_=format_)
133
+ parts: list[str] = ["pg_dump"]
134
+ if docker_container is not None:
135
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
136
+ parts.extend([
137
+ # general options
138
+ f"--file={str(path)!r}",
139
+ f"--format={format_}",
140
+ "--verbose",
141
+ # output options
142
+ *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
143
+ "--large-objects",
144
+ "--no-owner",
145
+ "--no-privileges",
146
+ # connection options
147
+ f"--dbname={extracted.database}",
148
+ f"--host={extracted.host}",
149
+ f"--port={extracted.port}",
150
+ f"--username={extracted.username}",
151
+ "--no-password",
152
+ ])
153
+ if (format_ == "directory") and (jobs is not None):
154
+ parts.append(f"--jobs={jobs}")
155
+ if create:
156
+ parts.append("--create")
157
+ if extension is not None:
158
+ parts.extend([f"--extension={e}" for e in always_iterable(extension)])
159
+ if extension_exc is not None:
160
+ parts.extend([
161
+ f"--exclude-extension={e}" for e in always_iterable(extension_exc)
162
+ ])
163
+ if schema is not None:
164
+ parts.extend([f"--schema={s}" for s in always_iterable(schema)])
165
+ if schema_exc is not None:
166
+ parts.extend([f"--exclude-schema={s}" for s in always_iterable(schema_exc)])
167
+ if table is not None:
168
+ parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(table)])
169
+ if table_exc is not None:
170
+ parts.extend([
171
+ f"--exclude-table={_get_table_name(t)}" for t in always_iterable(table_exc)
172
+ ])
173
+ if inserts:
174
+ parts.append("--inserts")
175
+ if on_conflict_do_nothing:
176
+ parts.append("--on-conflict-do-nothing")
177
+ if role is not None:
178
+ parts.append(f"--role={role}")
179
+ return " ".join(parts)
180
+
181
+
182
+ def _path_pg_dump(path: PathLike, /, *, format_: _PGDumpFormat = "plain") -> Path:
183
+ match format_:
184
+ case "plain":
185
+ suffix = ".sql"
186
+ case "custom":
187
+ suffix = ".pgdump"
188
+ case "directory":
189
+ suffix = None
190
+ case "tar":
191
+ suffix = ".tar"
192
+ case never:
193
+ assert_never(never)
194
+ path = Path(path)
195
+ if suffix is not None:
196
+ path = ensure_suffix(path, suffix)
197
+ return path
198
+
199
+
200
+ ##
201
+
202
+
203
+ async def restore(
204
+ url: URL,
205
+ path: PathLike,
206
+ /,
207
+ *,
208
+ psql: bool = False,
209
+ data_only: bool = False,
210
+ clean: bool = False,
211
+ create: bool = False,
212
+ jobs: int | None = None,
213
+ schema: MaybeCollectionStr | None = None,
214
+ schema_exc: MaybeCollectionStr | None = None,
215
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
216
+ role: str | None = None,
217
+ docker_container: str | None = None,
218
+ dry_run: bool = False,
219
+ logger: LoggerLike | None = None,
220
+ ) -> bool:
221
+ """Run `pg_restore`/`psql`."""
222
+ cmd = _build_pg_restore_or_psql(
223
+ url,
224
+ path,
225
+ psql=psql,
226
+ data_only=data_only,
227
+ clean=clean,
228
+ create=create,
229
+ jobs=jobs,
230
+ schema=schema,
231
+ schema_exc=schema_exc,
232
+ table=table,
233
+ role=role,
234
+ docker_container=docker_container,
235
+ )
236
+ if dry_run:
237
+ if logger is not None:
238
+ to_logger(logger).info("Would run:\n\t%r", str(cmd))
239
+ return True
240
+ with temp_environ(PGPASSWORD=url.password), Timer() as timer: # pragma: no cover
241
+ try:
242
+ output = await stream_command(cmd)
243
+ except KeyboardInterrupt:
244
+ if logger is not None:
245
+ to_logger(logger).info(
246
+ "Cancelled restore from %r after %s", str(path), timer
247
+ )
248
+ return False
249
+ if output.return_code != 0:
250
+ if logger is not None:
251
+ to_logger(logger).exception(
252
+ "Restore from %r failed after %s\nstderr:\n%s",
253
+ str(path),
254
+ timer,
255
+ output.stderr,
256
+ )
257
+ return False
258
+ if logger is not None: # pragma: no cover
259
+ to_logger(logger).info("Restore from %r finished after %s", str(path), timer)
260
+ return True # pragma: no cover
261
+
262
+
263
+ ##
264
+
265
+
266
+ def _build_pg_restore_or_psql(
267
+ url: URL,
268
+ path: PathLike,
269
+ /,
270
+ *,
271
+ psql: bool = False,
272
+ data_only: bool = False,
273
+ clean: bool = False,
274
+ create: bool = False,
275
+ jobs: int | None = None,
276
+ schema: MaybeCollectionStr | None = None,
277
+ schema_exc: MaybeCollectionStr | None = None,
278
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
279
+ role: str | None = None,
280
+ docker_container: str | None = None,
281
+ ) -> str:
282
+ path = Path(path)
283
+ if (path.suffix == ".sql") or psql:
284
+ return _build_psql(url, path, docker_container=docker_container)
285
+ return _build_pg_restore(
286
+ url,
287
+ path,
288
+ data_only=data_only,
289
+ clean=clean,
290
+ create=create,
291
+ jobs=jobs,
292
+ schemas=schema,
293
+ schemas_exc=schema_exc,
294
+ tables=table,
295
+ role=role,
296
+ docker_container=docker_container,
297
+ )
298
+
299
+
300
+ def _build_pg_restore(
301
+ url: URL,
302
+ path: PathLike,
303
+ /,
304
+ *,
305
+ data_only: bool = False,
306
+ clean: bool = False,
307
+ create: bool = False,
308
+ jobs: int | None = None,
309
+ schemas: MaybeCollectionStr | None = None,
310
+ schemas_exc: MaybeCollectionStr | None = None,
311
+ tables: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
312
+ role: str | None = None,
313
+ docker_container: str | None = None,
314
+ ) -> str:
315
+ """Run `pg_restore`."""
316
+ extracted = extract_url(url)
317
+ parts: list[str] = ["pg_restore"]
318
+ if docker_container is not None:
319
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
320
+ parts.extend([
321
+ # general options
322
+ "--verbose",
323
+ # restore options
324
+ *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
325
+ "--exit-on-error",
326
+ "--no-owner",
327
+ "--no-privileges",
328
+ # connection options
329
+ f"--host={extracted.host}",
330
+ f"--port={extracted.port}",
331
+ f"--username={extracted.username}",
332
+ f"--dbname={extracted.database}",
333
+ "--no-password",
334
+ ])
335
+ if create:
336
+ parts.append("--create")
337
+ if jobs is not None:
338
+ parts.append(f"--jobs={jobs}")
339
+ if schemas is not None:
340
+ parts.extend([f"--schema={s}" for s in always_iterable(schemas)])
341
+ if schemas_exc is not None:
342
+ parts.extend([f"--exclude-schema={s}" for s in always_iterable(schemas_exc)])
343
+ if tables is not None:
344
+ parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
345
+ if role is not None:
346
+ parts.append(f"--role={role}")
347
+ parts.append(str(path))
348
+ return " ".join(parts)
349
+
350
+
351
+ def _build_psql(
352
+ url: URL, path: PathLike, /, *, docker_container: str | None = None
353
+ ) -> str:
354
+ """Run `psql`."""
355
+ extracted = extract_url(url)
356
+ parts: list[str] = ["psql"]
357
+ if docker_container is not None:
358
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
359
+ parts.extend([
360
+ # general options
361
+ f"--dbname={extracted.database}",
362
+ f"--file={str(path)!r}",
363
+ # connection options
364
+ f"--host={extracted.host}",
365
+ f"--port={extracted.port}",
366
+ f"--username={extracted.username}",
367
+ "--no-password",
368
+ ])
369
+ return " ".join(parts)
370
+
371
+
372
+ ##
373
+
374
+
375
+ def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
376
+ match obj:
377
+ case Table() | DeclarativeBase() | type() as table_or_orm:
378
+ return get_table_name(table_or_orm)
379
+ case str() as name:
380
+ return name
381
+ case never:
382
+ assert_never(never)
383
+
384
+
385
+ def _resolve_data_only_and_clean(
386
+ *, data_only: bool = False, clean: bool = False
387
+ ) -> list[str]:
388
+ match data_only, clean:
389
+ case False, False:
390
+ return []
391
+ case True, False:
392
+ return ["--data-only"]
393
+ case False, True:
394
+ return ["--clean", "--if-exists"]
395
+ case True, True:
396
+ raise _ResolveDataOnlyAndCleanError
397
+ case never:
398
+ assert_never(never)
399
+
400
+
401
+ @dataclass(kw_only=True, slots=True)
402
+ class _ResolveDataOnlyAndCleanError(Exception):
403
+ @override
404
+ def __str__(self) -> str:
405
+ return "Cannot use '--data-only' and '--clean' together"
406
+
407
+
408
+ __all__ = ["pg_dump", "restore"]
utilities/pottery.py CHANGED
@@ -1,50 +1,137 @@
1
1
  from __future__ import annotations
2
2
 
3
- import asyncio
4
- from contextlib import asynccontextmanager, suppress
5
- from typing import TYPE_CHECKING
3
+ from contextlib import suppress
4
+ from dataclasses import dataclass
5
+ from sys import maxsize
6
+ from typing import TYPE_CHECKING, override
6
7
 
7
8
  from pottery import AIORedlock
8
9
  from pottery.exceptions import ReleaseUnlockedLock
9
10
  from redis.asyncio import Redis
10
11
 
11
- from utilities.datetime import MILLISECOND, SECOND, datetime_duration_to_float
12
+ from utilities.asyncio import sleep_td, timeout_td
13
+ from utilities.contextlib import enhanced_async_context_manager
12
14
  from utilities.iterables import always_iterable
15
+ from utilities.whenever import MILLISECOND, SECOND, to_nanoseconds
13
16
 
14
17
  if TYPE_CHECKING:
15
- from collections.abc import AsyncIterator
18
+ from collections.abc import AsyncIterator, Iterable
16
19
 
17
- from utilities.types import Duration, MaybeIterable
20
+ from whenever import Delta
18
21
 
22
+ from utilities.types import MaybeIterable
19
23
 
20
- @asynccontextmanager
21
- async def yield_locked_resource(
24
+ _NUM: int = 1
25
+ _TIMEOUT_RELEASE: Delta = 10 * SECOND
26
+ _SLEEP: Delta = MILLISECOND
27
+
28
+
29
+ ##
30
+
31
+
32
+ async def extend_lock(
33
+ *, lock: AIORedlock | None = None, raise_on_redis_errors: bool | None = None
34
+ ) -> None:
35
+ """Extend a lock."""
36
+ if lock is not None:
37
+ await lock.extend(raise_on_redis_errors=raise_on_redis_errors)
38
+
39
+
40
+ ##
41
+
42
+
43
+ @enhanced_async_context_manager
44
+ async def yield_access(
22
45
  redis: MaybeIterable[Redis],
23
46
  key: str,
24
47
  /,
25
48
  *,
26
- duration: Duration = 10 * SECOND,
27
- sleep: Duration = MILLISECOND,
28
- ) -> AsyncIterator[None]:
29
- """Yield a locked resource."""
49
+ num: int = _NUM,
50
+ timeout_release: Delta = _TIMEOUT_RELEASE,
51
+ num_extensions: int | None = None,
52
+ timeout_acquire: Delta | None = None,
53
+ sleep: Delta = _SLEEP,
54
+ throttle: Delta | None = None,
55
+ ) -> AsyncIterator[AIORedlock]:
56
+ """Acquire access to a locked resource."""
57
+ if num <= 0:
58
+ raise _YieldAccessNumLocksError(key=key, num=num)
30
59
  masters = ( # skipif-ci-and-not-linux
31
60
  {redis} if isinstance(redis, Redis) else set(always_iterable(redis))
32
61
  )
33
- duration_use = datetime_duration_to_float(duration) # skipif-ci-and-not-linux
34
- lock = AIORedlock( # skipif-ci-and-not-linux
35
- key=key,
36
- masters=masters,
37
- auto_release_time=duration_use,
38
- context_manager_timeout=duration_use,
39
- )
40
- sleep_use = datetime_duration_to_float(sleep) # skipif-ci-and-not-linux
41
- while not await lock.acquire(): # pragma: no cover
42
- _ = await asyncio.sleep(sleep_use)
62
+ locks = [ # skipif-ci-and-not-linux
63
+ AIORedlock(
64
+ key=f"{key}_{i}_of_{num}",
65
+ masters=masters,
66
+ auto_release_time=to_nanoseconds(timeout_release) / 1e9,
67
+ num_extensions=maxsize if num_extensions is None else num_extensions,
68
+ )
69
+ for i in range(1, num + 1)
70
+ ]
71
+ lock: AIORedlock | None = None # skipif-ci-and-not-linux
43
72
  try: # skipif-ci-and-not-linux
44
- yield
73
+ lock = await _get_first_available_lock(
74
+ key, locks, num=num, timeout=timeout_acquire, sleep=sleep
75
+ )
76
+ yield lock
45
77
  finally: # skipif-ci-and-not-linux
46
- with suppress(ReleaseUnlockedLock):
47
- await lock.release()
78
+ await sleep_td(throttle)
79
+ if lock is not None:
80
+ with suppress(ReleaseUnlockedLock):
81
+ await lock.release()
82
+
83
+
84
+ async def _get_first_available_lock(
85
+ key: str,
86
+ locks: Iterable[AIORedlock],
87
+ /,
88
+ *,
89
+ num: int = _NUM,
90
+ timeout: Delta | None = None,
91
+ sleep: Delta | None = _SLEEP,
92
+ ) -> AIORedlock:
93
+ locks = list(locks) # skipif-ci-and-not-linux
94
+ error = _YieldAccessUnableToAcquireLockError( # skipif-ci-and-not-linux
95
+ key=key, num=num, timeout=timeout
96
+ )
97
+ async with timeout_td(timeout, error=error): # skipif-ci-and-not-linux
98
+ while True:
99
+ if (result := await _get_first_available_lock_if_any(locks)) is not None:
100
+ return result
101
+ await sleep_td(sleep)
102
+
103
+
104
+ async def _get_first_available_lock_if_any(
105
+ locks: Iterable[AIORedlock], /
106
+ ) -> AIORedlock | None:
107
+ for lock in locks: # skipif-ci-and-not-linux
108
+ if await lock.acquire(blocking=False):
109
+ return lock
110
+ return None # skipif-ci-and-not-linux
111
+
112
+
113
+ @dataclass(kw_only=True, slots=True)
114
+ class YieldAccessError(Exception):
115
+ key: str
116
+
117
+
118
+ @dataclass(kw_only=True, slots=True)
119
+ class _YieldAccessNumLocksError(YieldAccessError):
120
+ num: int
121
+
122
+ @override
123
+ def __str__(self) -> str:
124
+ return f"Number of locks for {self.key!r} must be positive; got {self.num}"
125
+
126
+
127
+ @dataclass(kw_only=True, slots=True)
128
+ class _YieldAccessUnableToAcquireLockError(YieldAccessError):
129
+ num: int
130
+ timeout: Delta | None
131
+
132
+ @override
133
+ def __str__(self) -> str:
134
+ return f"Unable to acquire any 1 of {self.num} locks for {self.key!r} after {self.timeout}" # skipif-ci-and-not-linux
48
135
 
49
136
 
50
- __all__ = ["yield_locked_resource"]
137
+ __all__ = ["YieldAccessError", "extend_lock", "yield_access"]
utilities/pqdm.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from functools import partial
4
- from typing import TYPE_CHECKING, Any, Literal, TypeVar, assert_never
4
+ from typing import TYPE_CHECKING, Any, Literal, assert_never
5
5
 
6
6
  from pqdm import processes, threads
7
7
  from tqdm.auto import tqdm as tqdm_auto
@@ -9,7 +9,7 @@ from tqdm.auto import tqdm as tqdm_auto
9
9
  from utilities.functions import get_func_name
10
10
  from utilities.iterables import apply_to_varargs
11
11
  from utilities.os import get_cpu_use
12
- from utilities.sentinel import Sentinel, sentinel
12
+ from utilities.sentinel import Sentinel, is_sentinel, sentinel
13
13
 
14
14
  if TYPE_CHECKING:
15
15
  from collections.abc import Callable, Iterable
@@ -20,12 +20,11 @@ if TYPE_CHECKING:
20
20
  from utilities.types import Parallelism
21
21
 
22
22
 
23
- _T = TypeVar("_T")
24
23
  type _ExceptionBehaviour = Literal["ignore", "immediate", "deferred"]
25
24
 
26
25
 
27
- def pqdm_map(
28
- func: Callable[..., _T],
26
+ def pqdm_map[T](
27
+ func: Callable[..., T],
29
28
  /,
30
29
  *iterables: Iterable[Any],
31
30
  parallelism: Parallelism = "processes",
@@ -35,7 +34,7 @@ def pqdm_map(
35
34
  tqdm_class: tqdm_type = tqdm_auto, # pyright: ignore[reportArgumentType]
36
35
  desc: str | None | Sentinel = sentinel,
37
36
  **kwargs: Any,
38
- ) -> list[_T]:
37
+ ) -> list[T]:
39
38
  """Parallel map, powered by `pqdm`."""
40
39
  return pqdm_starmap(
41
40
  func,
@@ -50,8 +49,8 @@ def pqdm_map(
50
49
  )
51
50
 
52
51
 
53
- def pqdm_starmap(
54
- func: Callable[..., _T],
52
+ def pqdm_starmap[T](
53
+ func: Callable[..., T],
55
54
  iterable: Iterable[tuple[Any, ...]],
56
55
  /,
57
56
  *,
@@ -62,7 +61,7 @@ def pqdm_starmap(
62
61
  tqdm_class: tqdm_type = tqdm_auto, # pyright: ignore[reportArgumentType]
63
62
  desc: str | None | Sentinel = sentinel,
64
63
  **kwargs: Any,
65
- ) -> list[_T]:
64
+ ) -> list[T]:
66
65
  """Parallel starmap, powered by `pqdm`."""
67
66
  apply = partial(apply_to_varargs, func)
68
67
  n_jobs_use = get_cpu_use(n=n_jobs)
@@ -91,7 +90,7 @@ def pqdm_starmap(
91
90
  **_get_desc(desc, func),
92
91
  **kwargs,
93
92
  )
94
- case _ as never:
93
+ case never:
95
94
  assert_never(never)
96
95
  return list(result)
97
96
 
@@ -99,7 +98,7 @@ def pqdm_starmap(
99
98
  def _get_desc(
100
99
  desc: str | None | Sentinel, func: Callable[..., Any], /
101
100
  ) -> dict[str, str]:
102
- desc_use = get_func_name(func) if isinstance(desc, Sentinel) else desc
101
+ desc_use = get_func_name(func) if is_sentinel(desc) else desc
103
102
  return {} if desc_use is None else {"desc": desc_use}
104
103
 
105
104