dycw-utilities 0.135.0__py3-none-any.whl → 0.178.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dycw-utilities might be problematic.
- dycw_utilities-0.178.1.dist-info/METADATA +34 -0
- dycw_utilities-0.178.1.dist-info/RECORD +105 -0
- dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
- dycw_utilities-0.178.1.dist-info/entry_points.txt +4 -0
- utilities/__init__.py +1 -1
- utilities/altair.py +13 -10
- utilities/asyncio.py +312 -787
- utilities/atomicwrites.py +18 -6
- utilities/atools.py +64 -4
- utilities/cachetools.py +9 -6
- utilities/click.py +195 -77
- utilities/concurrent.py +1 -1
- utilities/contextlib.py +216 -17
- utilities/contextvars.py +20 -1
- utilities/cryptography.py +3 -3
- utilities/dataclasses.py +15 -28
- utilities/docker.py +387 -0
- utilities/enum.py +2 -2
- utilities/errors.py +17 -3
- utilities/fastapi.py +28 -59
- utilities/fpdf2.py +2 -2
- utilities/functions.py +24 -269
- utilities/git.py +9 -30
- utilities/grp.py +28 -0
- utilities/gzip.py +31 -0
- utilities/http.py +3 -2
- utilities/hypothesis.py +513 -159
- utilities/importlib.py +17 -1
- utilities/inflect.py +12 -4
- utilities/iterables.py +33 -58
- utilities/jinja2.py +148 -0
- utilities/json.py +70 -0
- utilities/libcst.py +38 -17
- utilities/lightweight_charts.py +4 -7
- utilities/logging.py +136 -93
- utilities/math.py +8 -4
- utilities/more_itertools.py +43 -45
- utilities/operator.py +27 -27
- utilities/orjson.py +189 -36
- utilities/os.py +61 -4
- utilities/packaging.py +115 -0
- utilities/parse.py +8 -5
- utilities/pathlib.py +269 -40
- utilities/permissions.py +298 -0
- utilities/platform.py +7 -6
- utilities/polars.py +1205 -413
- utilities/polars_ols.py +1 -1
- utilities/postgres.py +408 -0
- utilities/pottery.py +43 -19
- utilities/pqdm.py +3 -3
- utilities/psutil.py +5 -57
- utilities/pwd.py +28 -0
- utilities/pydantic.py +4 -52
- utilities/pydantic_settings.py +240 -0
- utilities/pydantic_settings_sops.py +76 -0
- utilities/pyinstrument.py +7 -7
- utilities/pytest.py +104 -143
- utilities/pytest_plugins/__init__.py +1 -0
- utilities/pytest_plugins/pytest_randomly.py +23 -0
- utilities/pytest_plugins/pytest_regressions.py +56 -0
- utilities/pytest_regressions.py +26 -46
- utilities/random.py +11 -6
- utilities/re.py +1 -1
- utilities/redis.py +220 -343
- utilities/sentinel.py +10 -0
- utilities/shelve.py +4 -1
- utilities/shutil.py +25 -0
- utilities/slack_sdk.py +35 -104
- utilities/sqlalchemy.py +496 -471
- utilities/sqlalchemy_polars.py +29 -54
- utilities/string.py +2 -3
- utilities/subprocess.py +1977 -0
- utilities/tempfile.py +112 -4
- utilities/testbook.py +50 -0
- utilities/text.py +174 -42
- utilities/throttle.py +158 -0
- utilities/timer.py +2 -2
- utilities/traceback.py +70 -35
- utilities/types.py +102 -30
- utilities/typing.py +479 -19
- utilities/uuid.py +42 -5
- utilities/version.py +27 -26
- utilities/whenever.py +1559 -361
- utilities/zoneinfo.py +80 -22
- dycw_utilities-0.135.0.dist-info/METADATA +0 -39
- dycw_utilities-0.135.0.dist-info/RECORD +0 -96
- dycw_utilities-0.135.0.dist-info/WHEEL +0 -4
- dycw_utilities-0.135.0.dist-info/licenses/LICENSE +0 -21
- utilities/aiolimiter.py +0 -25
- utilities/arq.py +0 -216
- utilities/eventkit.py +0 -388
- utilities/luigi.py +0 -183
- utilities/period.py +0 -152
- utilities/pudb.py +0 -62
- utilities/python_dotenv.py +0 -101
- utilities/streamlit.py +0 -105
- utilities/typed_settings.py +0 -123
utilities/polars_ols.py CHANGED

@@ -6,8 +6,8 @@ from polars import Expr, Series, struct
 from polars_ols import RollingKwargs, compute_rolling_least_squares
 
 from utilities.errors import ImpossibleCaseError
-from utilities.functions import is_sequence_of
 from utilities.polars import concat_series, ensure_expr_or_series
+from utilities.typing import is_sequence_of
 
 if TYPE_CHECKING:
     from polars._typing import IntoExprColumn
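For downstream users, the only visible change here is the new home of `is_sequence_of`: it moved from `utilities.functions` to `utilities.typing`. A minimal migration sketch (the call below is hypothetical usage, assuming the predicate checks that a value is a sequence whose elements are all of the given type):

# hypothetical usage; only the import location is confirmed by the diff
from utilities.typing import is_sequence_of  # was: from utilities.functions import ...

assert is_sequence_of([1.0, 2.0, 3.0], float)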
utilities/postgres.py ADDED

@@ -0,0 +1,408 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from pathlib import Path
+from shutil import rmtree
+from typing import TYPE_CHECKING, Literal, assert_never, override
+
+from sqlalchemy import Table
+from sqlalchemy.orm import DeclarativeBase
+
+from utilities.asyncio import stream_command
+from utilities.docker import docker_exec_cmd
+from utilities.iterables import always_iterable
+from utilities.logging import to_logger
+from utilities.os import temp_environ
+from utilities.pathlib import ensure_suffix
+from utilities.sqlalchemy import extract_url, get_table_name
+from utilities.timer import Timer
+from utilities.types import PathLike
+
+if TYPE_CHECKING:
+    from sqlalchemy import URL
+
+    from utilities.sqlalchemy import TableOrORMInstOrClass
+    from utilities.types import (
+        LoggerLike,
+        MaybeCollection,
+        MaybeCollectionStr,
+        PathLike,
+    )
+
+
+type _PGDumpFormat = Literal["plain", "custom", "directory", "tar"]
+
+
+async def pg_dump(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    docker_container: str | None = None,
+    format_: _PGDumpFormat = "plain",
+    jobs: int | None = None,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    extension: MaybeCollectionStr | None = None,
+    extension_exc: MaybeCollectionStr | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    inserts: bool = False,
+    on_conflict_do_nothing: bool = False,
+    role: str | None = None,
+    dry_run: bool = False,
+    logger: LoggerLike | None = None,
+) -> bool:
+    """Run `pg_dump`."""
+    path = _path_pg_dump(path, format_=format_)
+    path.parent.mkdir(parents=True, exist_ok=True)
+    cmd = _build_pg_dump(
+        url,
+        path,
+        docker_container=docker_container,
+        format_=format_,
+        jobs=jobs,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        extension=extension,
+        extension_exc=extension_exc,
+        schema=schema,
+        schema_exc=schema_exc,
+        table=table,
+        table_exc=table_exc,
+        inserts=inserts,
+        on_conflict_do_nothing=on_conflict_do_nothing,
+        role=role,
+    )
+    if dry_run:
+        if logger is not None:
+            to_logger(logger).info("Would run:\n\t%r", str(cmd))
+        return True
+    with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
+        try:
+            output = await stream_command(cmd)
+        except KeyboardInterrupt:
+            if logger is not None:
+                to_logger(logger).info(
+                    "Cancelled backup to %r after %s", str(path), timer
+                )
+            rmtree(path, ignore_errors=True)
+            return False
+        if output.return_code != 0:
+            if logger is not None:
+                to_logger(logger).exception(
+                    "Backup to %r failed after %s\nstderr:\n%s",
+                    str(path),
+                    timer,
+                    output.stderr,
+                )
+            rmtree(path, ignore_errors=True)
+            return False
+        if logger is not None:  # pragma: no cover
+            to_logger(logger).info("Backup to %r finished after %s", str(path), timer)
+        return True  # pragma: no cover
+
+
+def _build_pg_dump(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    docker_container: str | None = None,
+    format_: _PGDumpFormat = "plain",
+    jobs: int | None = None,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    extension: MaybeCollectionStr | None = None,
+    extension_exc: MaybeCollectionStr | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    inserts: bool = False,
+    on_conflict_do_nothing: bool = False,
+    role: str | None = None,
+) -> str:
+    extracted = extract_url(url)
+    path = _path_pg_dump(path, format_=format_)
+    parts: list[str] = ["pg_dump"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
+        # general options
+        f"--file={str(path)!r}",
+        f"--format={format_}",
+        "--verbose",
+        # output options
+        *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
+        "--large-objects",
+        "--no-owner",
+        "--no-privileges",
+        # connection options
+        f"--dbname={extracted.database}",
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
+        "--no-password",
+    ])
+    if (format_ == "directory") and (jobs is not None):
+        parts.append(f"--jobs={jobs}")
+    if create:
+        parts.append("--create")
+    if extension is not None:
+        parts.extend([f"--extension={e}" for e in always_iterable(extension)])
+    if extension_exc is not None:
+        parts.extend([
+            f"--exclude-extension={e}" for e in always_iterable(extension_exc)
+        ])
+    if schema is not None:
+        parts.extend([f"--schema={s}" for s in always_iterable(schema)])
+    if schema_exc is not None:
+        parts.extend([f"--exclude-schema={s}" for s in always_iterable(schema_exc)])
+    if table is not None:
+        parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(table)])
+    if table_exc is not None:
+        parts.extend([
+            f"--exclude-table={_get_table_name(t)}" for t in always_iterable(table_exc)
+        ])
+    if inserts:
+        parts.append("--inserts")
+    if on_conflict_do_nothing:
+        parts.append("--on-conflict-do-nothing")
+    if role is not None:
+        parts.append(f"--role={role}")
+    return " ".join(parts)
+
+
+def _path_pg_dump(path: PathLike, /, *, format_: _PGDumpFormat = "plain") -> Path:
+    match format_:
+        case "plain":
+            suffix = ".sql"
+        case "custom":
+            suffix = ".pgdump"
+        case "directory":
+            suffix = None
+        case "tar":
+            suffix = ".tar"
+        case never:
+            assert_never(never)
+    path = Path(path)
+    if suffix is not None:
+        path = ensure_suffix(path, suffix)
+    return path
+
+
+##
+
+
+async def restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    psql: bool = False,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    jobs: int | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+    dry_run: bool = False,
+    logger: LoggerLike | None = None,
+) -> bool:
+    """Run `pg_restore`/`psql`."""
+    cmd = _build_pg_restore_or_psql(
+        url,
+        path,
+        psql=psql,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        jobs=jobs,
+        schema=schema,
+        schema_exc=schema_exc,
+        table=table,
+        role=role,
+        docker_container=docker_container,
+    )
+    if dry_run:
+        if logger is not None:
+            to_logger(logger).info("Would run:\n\t%r", str(cmd))
+        return True
+    with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
+        try:
+            output = await stream_command(cmd)
+        except KeyboardInterrupt:
+            if logger is not None:
+                to_logger(logger).info(
+                    "Cancelled restore from %r after %s", str(path), timer
+                )
+            return False
+        if output.return_code != 0:
+            if logger is not None:
+                to_logger(logger).exception(
+                    "Restore from %r failed after %s\nstderr:\n%s",
+                    str(path),
+                    timer,
+                    output.stderr,
+                )
+            return False
+        if logger is not None:  # pragma: no cover
+            to_logger(logger).info("Restore from %r finished after %s", str(path), timer)
+        return True  # pragma: no cover
+
+
+##
+
+
+def _build_pg_restore_or_psql(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    psql: bool = False,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    jobs: int | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+) -> str:
+    path = Path(path)
+    if (path.suffix == ".sql") or psql:
+        return _build_psql(url, path, docker_container=docker_container)
+    return _build_pg_restore(
+        url,
+        path,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        jobs=jobs,
+        schemas=schema,
+        schemas_exc=schema_exc,
+        tables=table,
+        role=role,
+        docker_container=docker_container,
+    )
+
+
+def _build_pg_restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    jobs: int | None = None,
+    schemas: MaybeCollectionStr | None = None,
+    schemas_exc: MaybeCollectionStr | None = None,
+    tables: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+) -> str:
+    """Run `pg_restore`."""
+    extracted = extract_url(url)
+    parts: list[str] = ["pg_restore"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
+        # general options
+        "--verbose",
+        # restore options
+        *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
+        "--exit-on-error",
+        "--no-owner",
+        "--no-privileges",
+        # connection options
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
+        f"--dbname={extracted.database}",
+        "--no-password",
+    ])
+    if create:
+        parts.append("--create")
+    if jobs is not None:
+        parts.append(f"--jobs={jobs}")
+    if schemas is not None:
+        parts.extend([f"--schema={s}" for s in always_iterable(schemas)])
+    if schemas_exc is not None:
+        parts.extend([f"--exclude-schema={s}" for s in always_iterable(schemas_exc)])
+    if tables is not None:
+        parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
+    if role is not None:
+        parts.append(f"--role={role}")
+    parts.append(str(path))
+    return " ".join(parts)
+
+
+def _build_psql(
+    url: URL, path: PathLike, /, *, docker_container: str | None = None
+) -> str:
+    """Run `psql`."""
+    extracted = extract_url(url)
+    parts: list[str] = ["psql"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
+        # general options
+        f"--dbname={extracted.database}",
+        f"--file={str(path)!r}",
+        # connection options
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
+        "--no-password",
+    ])
+    return " ".join(parts)
+
+
+##
+
+
+def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
+    match obj:
+        case Table() | DeclarativeBase() | type() as table_or_orm:
+            return get_table_name(table_or_orm)
+        case str() as name:
+            return name
+        case never:
+            assert_never(never)
+
+
+def _resolve_data_only_and_clean(
+    *, data_only: bool = False, clean: bool = False
+) -> list[str]:
+    match data_only, clean:
+        case False, False:
+            return []
+        case True, False:
+            return ["--data-only"]
+        case False, True:
+            return ["--clean", "--if-exists"]
+        case True, True:
+            raise _ResolveDataOnlyAndCleanError
+        case never:
+            assert_never(never)
+
+
+@dataclass(kw_only=True, slots=True)
+class _ResolveDataOnlyAndCleanError(Exception):
+    @override
+    def __str__(self) -> str:
+        return "Cannot use '--data-only' and '--clean' together"
+
+
+__all__ = ["pg_dump", "restore"]
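A usage sketch for the new module (the connection details below are placeholders; with dry_run=True the commands are only built and logged, never executed):

import asyncio
from logging import basicConfig, getLogger

from sqlalchemy import URL

from utilities.postgres import pg_dump, restore


async def main() -> None:
    url = URL.create(
        "postgresql",
        username="postgres",
        password="password",
        host="localhost",
        port=5432,
        database="mydb",
    )
    logger = getLogger(__name__)
    # "custom" format gets a .pgdump suffix appended via _path_pg_dump
    await pg_dump(url, "backup", format_="custom", dry_run=True, logger=logger)
    # a .pgdump path routes to pg_restore; a .sql path (or psql=True) routes to psql
    await restore(url, "backup.pgdump", dry_run=True, logger=logger)


basicConfig(level="INFO")
asyncio.run(main())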
utilities/pottery.py CHANGED

@@ -1,7 +1,8 @@
 from __future__ import annotations
 
-from contextlib import
+from contextlib import suppress
 from dataclasses import dataclass
+from sys import maxsize
 from typing import TYPE_CHECKING, override
 
 from pottery import AIORedlock
@@ -9,30 +10,50 @@ from pottery.exceptions import ReleaseUnlockedLock
 from redis.asyncio import Redis
 
 from utilities.asyncio import sleep_td, timeout_td
+from utilities.contextlib import enhanced_async_context_manager
 from utilities.iterables import always_iterable
-from utilities.whenever import MILLISECOND, SECOND
+from utilities.whenever import MILLISECOND, SECOND, to_nanoseconds
 
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterable
 
-    from whenever import
+    from whenever import Delta
 
     from utilities.types import MaybeIterable
 
+_NUM: int = 1
+_TIMEOUT_RELEASE: Delta = 10 * SECOND
+_SLEEP: Delta = MILLISECOND
 
-
+
+##
+
+
+async def extend_lock(
+    *, lock: AIORedlock | None = None, raise_on_redis_errors: bool | None = None
+) -> None:
+    """Extend a lock."""
+    if lock is not None:
+        await lock.extend(raise_on_redis_errors=raise_on_redis_errors)
+
+
+##
+
+
+@enhanced_async_context_manager
 async def yield_access(
     redis: MaybeIterable[Redis],
     key: str,
     /,
     *,
-    num: int =
-
-
-
-
-
-
+    num: int = _NUM,
+    timeout_release: Delta = _TIMEOUT_RELEASE,
+    num_extensions: int | None = None,
+    timeout_acquire: Delta | None = None,
+    sleep: Delta = _SLEEP,
+    throttle: Delta | None = None,
+) -> AsyncIterator[AIORedlock]:
+    """Acquire access to a locked resource."""
     if num <= 0:
         raise _YieldAccessNumLocksError(key=key, num=num)
     masters = (  # skipif-ci-and-not-linux
@@ -42,7 +63,8 @@ async def yield_access(
         AIORedlock(
             key=f"{key}_{i}_of_{num}",
             masters=masters,
-            auto_release_time=timeout_release
+            auto_release_time=to_nanoseconds(timeout_release) / 1e9,
+            num_extensions=maxsize if num_extensions is None else num_extensions,
         )
         for i in range(1, num + 1)
     ]
@@ -51,7 +73,7 @@ async def yield_access(
         lock = await _get_first_available_lock(
             key, locks, num=num, timeout=timeout_acquire, sleep=sleep
         )
-        yield
+        yield lock
     finally:  # skipif-ci-and-not-linux
         await sleep_td(throttle)
         if lock is not None:
@@ -64,9 +86,9 @@ async def _get_first_available_lock(
     locks: Iterable[AIORedlock],
     /,
     *,
-    num: int =
-    timeout:
-    sleep:
+    num: int = _NUM,
+    timeout: Delta | None = None,
+    sleep: Delta | None = _SLEEP,
 ) -> AIORedlock:
     locks = list(locks)  # skipif-ci-and-not-linux
     error = _YieldAccessUnableToAcquireLockError(  # skipif-ci-and-not-linux
@@ -91,11 +113,12 @@ async def _get_first_available_lock_if_any(
 @dataclass(kw_only=True, slots=True)
 class YieldAccessError(Exception):
     key: str
-    num: int
 
 
 @dataclass(kw_only=True, slots=True)
 class _YieldAccessNumLocksError(YieldAccessError):
+    num: int
+
     @override
     def __str__(self) -> str:
         return f"Number of locks for {self.key!r} must be positive; got {self.num}"
@@ -103,11 +126,12 @@ class _YieldAccessNumLocksError(YieldAccessError):
 
 @dataclass(kw_only=True, slots=True)
 class _YieldAccessUnableToAcquireLockError(YieldAccessError):
-
+    num: int
+    timeout: Delta | None
 
     @override
     def __str__(self) -> str:
         return f"Unable to acquire any 1 of {self.num} locks for {self.key!r} after {self.timeout}"  # skipif-ci-and-not-linux
 
 
-__all__ = ["YieldAccessError", "yield_access"]
+__all__ = ["YieldAccessError", "extend_lock", "yield_access"]
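A usage sketch of the new API surface (assumes a Redis server reachable at the default localhost:6379; `SECOND` is re-exported by `utilities.whenever` as in the diff's own imports):

import asyncio

from redis.asyncio import Redis

from utilities.pottery import extend_lock, yield_access
from utilities.whenever import SECOND


async def main() -> None:
    redis = Redis()
    # allow at most two concurrent holders of the "reports" resource
    async with yield_access(
        redis, "reports", num=2, timeout_release=30 * SECOND
    ) as lock:
        ...  # do the guarded work
        await extend_lock(lock=lock)  # push back the auto-release deadline


asyncio.run(main())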
utilities/pqdm.py CHANGED

@@ -9,7 +9,7 @@ from tqdm.auto import tqdm as tqdm_auto
 from utilities.functions import get_func_name
 from utilities.iterables import apply_to_varargs
 from utilities.os import get_cpu_use
-from utilities.sentinel import Sentinel, sentinel
+from utilities.sentinel import Sentinel, is_sentinel, sentinel
 
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable
@@ -90,7 +90,7 @@ def pqdm_starmap[T](
                 **_get_desc(desc, func),
                 **kwargs,
             )
-        case
+        case never:
            assert_never(never)
     return list(result)
 
@@ -98,7 +98,7 @@ def pqdm_starmap[T](
 def _get_desc(
     desc: str | None | Sentinel, func: Callable[..., Any], /
 ) -> dict[str, str]:
-    desc_use = get_func_name(func) if
+    desc_use = get_func_name(func) if is_sentinel(desc) else desc
     return {} if desc_use is None else {"desc": desc_use}
 
 
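A sketch of the sentinel-default pattern behind `_get_desc`: the `sentinel` singleton distinguishes "argument omitted" (derive a description from the function name) from an explicit `None` (suppress the description entirely). All names here come from the diff; the `describe` helper is illustrative only:

from utilities.sentinel import Sentinel, is_sentinel, sentinel


def describe(desc: str | None | Sentinel = sentinel, /) -> dict[str, str]:
    # mirrors _get_desc: sentinel -> fallback, None -> no desc, str -> as given
    desc_use = "fallback" if is_sentinel(desc) else desc
    return {} if desc_use is None else {"desc": desc_use}


assert describe() == {"desc": "fallback"}
assert describe(None) == {}
assert describe("custom") == {"desc": "custom"}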
utilities/psutil.py CHANGED

@@ -1,71 +1,19 @@
 from __future__ import annotations
 
 from dataclasses import dataclass, field
-from json import dumps
-from logging import getLogger
 from math import isclose, nan
-from
-from typing import TYPE_CHECKING, Self, override
+from typing import TYPE_CHECKING, Self
 
 from psutil import swap_memory, virtual_memory
 
-from utilities.asyncio import Looper
 from utilities.contextlib import suppress_super_object_attribute_error
-from utilities.whenever import
+from utilities.whenever import get_now
 
 if TYPE_CHECKING:
-    from
+    from whenever import ZonedDateTime
 
-    from whenever import TimeDelta, ZonedDateTime
 
-
-
-
-@dataclass(kw_only=True)
-class MemoryMonitorService(Looper[None]):
-    """Service to monitor memory usage."""
-
-    # base
-    freq: TimeDelta = field(default=10 * SECOND, repr=False)
-    backoff: TimeDelta = field(default=10 * SECOND, repr=False)
-    # self
-    console: str | None = field(default=None, repr=False)
-    path: PathLike = "memory.txt"
-    _console: Logger | None = field(init=False, repr=False)
-    _path: Path = field(init=False, repr=False)
-
-    @override
-    def __post_init__(self) -> None:
-        super().__post_init__()
-        if self.console is not None:
-            self._console = getLogger(self.console)
-        self._path = Path(self.path)
-        self._path.parent.mkdir(parents=True, exist_ok=True)
-
-    @override
-    async def core(self) -> None:
-        await super().core()
-        memory = MemoryUsage.new()
-        mapping = {
-            "datetime": memory.datetime.format_common_iso(),
-            "virtual used (mb)": memory.virtual_used_mb,
-            "virtual total (mb)": memory.virtual_total_mb,
-            "virtual (%)": memory.virtual_pct,
-            "swap used (mb)": memory.swap_used_mb,
-            "swap total (mb)": memory.swap_total_mb,
-            "swap (%)": memory.swap_pct,
-        }
-        ser = dumps(mapping)
-        with self._path.open(mode="a") as fh:
-            _ = fh.write(f"{ser}\n")
-        if self._console is not None:
-            self._console.info("%s", mapping)
-
-
-##
-
-
-@dataclass(kw_only=True)
+@dataclass(order=True, unsafe_hash=True, kw_only=True)
 class MemoryUsage:
     """A memory usage."""
 
@@ -113,4 +61,4 @@ class MemoryUsage:
         return round(bytes_ / (1024**2))
 
 
-__all__ = ["
+__all__ = ["MemoryUsage"]
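A usage sketch of what remains public (assumption: `MemoryUsage.new()` and the field names below, which are taken from the removed MemoryMonitorService code above, stay available):

from utilities.psutil import MemoryUsage

usage = MemoryUsage.new()
print("virtual used (mb):", usage.virtual_used_mb)
print("virtual total (mb):", usage.virtual_total_mb)
print("virtual (%):", usage.virtual_pct)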
utilities/pwd.py ADDED

@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from typing import assert_never
+
+from utilities.os import EFFECTIVE_USER_ID
+from utilities.platform import SYSTEM
+
+
+def get_uid_name(uid: int, /) -> str | None:
+    """Get the name of a user ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from pwd import getpwuid
+
+            return getpwuid(uid).pw_name
+        case never:
+            assert_never(never)
+
+
+ROOT_USER_NAME = get_uid_name(0)
+EFFECTIVE_USER_NAME = (
+    None if EFFECTIVE_USER_ID is None else get_uid_name(EFFECTIVE_USER_ID)
+)
+
+
+__all__ = ["EFFECTIVE_USER_NAME", "ROOT_USER_NAME", "get_uid_name"]
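A usage sketch: user-ID lookups return None on Windows and defer to the standard library's pwd module elsewhere.

from utilities.pwd import EFFECTIVE_USER_NAME, ROOT_USER_NAME, get_uid_name

print(ROOT_USER_NAME)       # typically "root" on POSIX systems, None on Windows
print(EFFECTIVE_USER_NAME)  # the current effective user's name, or None
print(get_uid_name(0) == ROOT_USER_NAME)  # True by construction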