dycw-utilities 0.166.30__py3-none-any.whl → 0.175.17__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- dycw_utilities-0.175.17.dist-info/METADATA +34 -0
- {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.175.17.dist-info}/RECORD +43 -38
- dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
- {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.175.17.dist-info}/entry_points.txt +1 -0
- utilities/__init__.py +1 -1
- utilities/altair.py +9 -4
- utilities/asyncio.py +10 -16
- utilities/cachetools.py +9 -6
- utilities/click.py +76 -20
- utilities/docker.py +293 -0
- utilities/functions.py +1 -1
- utilities/grp.py +28 -0
- utilities/hypothesis.py +38 -6
- utilities/importlib.py +17 -1
- utilities/jinja2.py +148 -0
- utilities/logging.py +7 -9
- utilities/orjson.py +18 -18
- utilities/os.py +38 -0
- utilities/parse.py +2 -2
- utilities/pathlib.py +18 -1
- utilities/permissions.py +298 -0
- utilities/platform.py +1 -1
- utilities/polars.py +4 -1
- utilities/postgres.py +28 -29
- utilities/pwd.py +28 -0
- utilities/pydantic.py +11 -0
- utilities/pydantic_settings.py +81 -8
- utilities/pydantic_settings_sops.py +13 -0
- utilities/pytest.py +60 -30
- utilities/pytest_regressions.py +26 -7
- utilities/shutil.py +25 -0
- utilities/sqlalchemy.py +15 -0
- utilities/subprocess.py +1572 -0
- utilities/tempfile.py +60 -1
- utilities/text.py +48 -32
- utilities/timer.py +2 -2
- utilities/traceback.py +1 -1
- utilities/types.py +5 -0
- utilities/typing.py +8 -2
- utilities/whenever.py +36 -5
- dycw_utilities-0.166.30.dist-info/METADATA +0 -41
- dycw_utilities-0.166.30.dist-info/WHEEL +0 -4
- dycw_utilities-0.166.30.dist-info/licenses/LICENSE +0 -21
- utilities/aeventkit.py +0 -388
- utilities/typed_settings.py +0 -152
utilities/postgres.py
CHANGED

@@ -9,6 +9,7 @@ from sqlalchemy import Table
 from sqlalchemy.orm import DeclarativeBase
 
 from utilities.asyncio import stream_command
+from utilities.docker import docker_exec_cmd
 from utilities.iterables import always_iterable
 from utilities.logging import to_logger
 from utilities.os import temp_environ
@@ -37,6 +38,7 @@ async def pg_dump(
     path: PathLike,
     /,
     *,
+    docker_container: str | None = None,
     format_: _PGDumpFormat = "plain",
     jobs: int | None = None,
     data_only: bool = False,
@@ -51,7 +53,6 @@ async def pg_dump(
     inserts: bool = False,
     on_conflict_do_nothing: bool = False,
     role: str | None = None,
-    docker: str | None = None,
     dry_run: bool = False,
     logger: LoggerLike | None = None,
 ) -> bool:
@@ -61,6 +62,7 @@ async def pg_dump(
     cmd = _build_pg_dump(
         url,
         path,
+        docker_container=docker_container,
         format_=format_,
         jobs=jobs,
         data_only=data_only,
@@ -75,7 +77,6 @@ async def pg_dump(
         inserts=inserts,
         on_conflict_do_nothing=on_conflict_do_nothing,
         role=role,
-        docker=docker,
     )
     if dry_run:
         if logger is not None:
@@ -111,6 +112,7 @@ def _build_pg_dump(
     path: PathLike,
     /,
     *,
+    docker_container: str | None = None,
     format_: _PGDumpFormat = "plain",
     jobs: int | None = None,
     data_only: bool = False,
@@ -125,12 +127,13 @@ def _build_pg_dump(
     inserts: bool = False,
     on_conflict_do_nothing: bool = False,
     role: str | None = None,
-    docker: str | None = None,
 ) -> str:
     extracted = extract_url(url)
     path = _path_pg_dump(path, format_=format_)
-    parts: list[str] = [
-        "pg_dump",
+    parts: list[str] = ["pg_dump"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
         # general options
         f"--file={str(path)!r}",
         f"--format={format_}",
@@ -146,7 +149,7 @@ def _build_pg_dump(
         f"--port={extracted.port}",
         f"--username={extracted.username}",
         "--no-password",
-    ]
+    ])
     if (format_ == "directory") and (jobs is not None):
         parts.append(f"--jobs={jobs}")
     if create:
@@ -173,8 +176,6 @@ def _build_pg_dump(
         parts.append("--on-conflict-do-nothing")
     if role is not None:
         parts.append(f"--role={role}")
-    if docker is not None:
-        parts = _wrap_docker(parts, docker)
     return " ".join(parts)
 
 
@@ -213,7 +214,7 @@ async def restore(
     schema_exc: MaybeCollectionStr | None = None,
     table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
     role: str | None = None,
-    docker: str | None = None,
+    docker_container: str | None = None,
     dry_run: bool = False,
     logger: LoggerLike | None = None,
 ) -> bool:
@@ -230,7 +231,7 @@ async def restore(
         schema_exc=schema_exc,
         table=table,
         role=role,
-        docker=docker,
+        docker_container=docker_container,
     )
     if dry_run:
         if logger is not None:
@@ -276,11 +277,11 @@ def _build_pg_restore_or_psql(
     schema_exc: MaybeCollectionStr | None = None,
     table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
     role: str | None = None,
-    docker: str | None = None,
+    docker_container: str | None = None,
 ) -> str:
     path = Path(path)
     if (path.suffix == ".sql") or psql:
-        return _build_psql(url, path, docker=docker)
+        return _build_psql(url, path, docker_container=docker_container)
     return _build_pg_restore(
         url,
         path,
@@ -292,7 +293,7 @@ def _build_pg_restore_or_psql(
         schemas_exc=schema_exc,
         tables=table,
         role=role,
-        docker=docker,
+        docker_container=docker_container,
     )
 
 
@@ -309,12 +310,14 @@ def _build_pg_restore(
     schemas_exc: MaybeCollectionStr | None = None,
     tables: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
     role: str | None = None,
-    docker: str | None = None,
+    docker_container: str | None = None,
 ) -> str:
     """Run `pg_restore`."""
     extracted = extract_url(url)
-    parts: list[str] = [
-        "pg_restore",
+    parts: list[str] = ["pg_restore"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
         # general options
         "--verbose",
         # restore options
@@ -328,7 +331,7 @@ def _build_pg_restore(
         f"--username={extracted.username}",
         f"--dbname={extracted.database}",
         "--no-password",
-    ]
+    ])
     if create:
         parts.append("--create")
     if jobs is not None:
@@ -341,17 +344,19 @@ def _build_pg_restore(
         parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
     if role is not None:
         parts.append(f"--role={role}")
-    if docker is not None:
-        parts = _wrap_docker(parts, docker)
     parts.append(str(path))
     return " ".join(parts)
 
 
-def _build_psql(url: URL, path: PathLike, /, *, docker: str | None = None) -> str:
+def _build_psql(
+    url: URL, path: PathLike, /, *, docker_container: str | None = None
+) -> str:
     """Run `psql`."""
     extracted = extract_url(url)
-    parts: list[str] = [
-        "psql",
+    parts: list[str] = ["psql"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
         # general options
         f"--dbname={extracted.database}",
         f"--file={str(path)!r}",
@@ -360,9 +365,7 @@ def _build_psql(url: URL, path: PathLike, /, *, docker: str | None = None) -> str:
         f"--port={extracted.port}",
         f"--username={extracted.username}",
         "--no-password",
-    ]
-    if docker is not None:
-        parts = _wrap_docker(parts, docker)
+    ])
     return " ".join(parts)
 
 
@@ -402,8 +405,4 @@ class _ResolveDataOnlyAndCleanError(Exception):
         return "Cannot use '--data-only' and '--clean' together"
 
 
-def _wrap_docker(parts: list[str], container: str, /) -> list[str]:
-    return ["docker", "exec", "-it", container, *parts]
-
-
 __all__ = ["pg_dump", "restore"]
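The `docker_container` parameter replaces the removed `docker` one, and the container prefix is now applied before the flags are appended, so `PGPASSWORD` can be forwarded into the container. `docker_exec_cmd` lives in the new utilities/docker.py, whose body is not shown in this diff; the sketch below is a hypothetical reconstruction inferred solely from the call sites above, not the package's actual implementation:

def docker_exec_cmd(container: str, /, *cmd: str, **env: str | None) -> list[str]:
    # Assumed behavior: prefix `cmd` with a `docker exec` invocation,
    # forwarding non-None env vars; the real flags/signature may differ.
    parts = ["docker", "exec"]
    for key, value in env.items():
        if value is not None:
            parts.append(f"--env={key}={value}")
    return [*parts, container, *cmd]

With `docker_container="db"`, `_build_pg_dump` would then emit something like `docker exec --env=PGPASSWORD=... db pg_dump --file=... --format=plain ...` instead of a bare `pg_dump ...`.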
utilities/pwd.py
ADDED

@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from typing import assert_never
+
+from utilities.os import EFFECTIVE_USER_ID
+from utilities.platform import SYSTEM
+
+
+def get_uid_name(uid: int, /) -> str | None:
+    """Get the name of a user ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from pwd import getpwuid
+
+            return getpwuid(uid).pw_name
+        case never:
+            assert_never(never)
+
+
+ROOT_USER_NAME = get_uid_name(0)
+EFFECTIVE_USER_NAME = (
+    None if EFFECTIVE_USER_ID is None else get_uid_name(EFFECTIVE_USER_ID)
+)
+
+
+__all__ = ["EFFECTIVE_USER_NAME", "ROOT_USER_NAME", "get_uid_name"]
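A quick usage sketch of the new module; on Linux/macOS lookups go through the stdlib `pwd` database, while on Windows (where `pwd` is unavailable) every lookup returns None:

from utilities.pwd import EFFECTIVE_USER_NAME, ROOT_USER_NAME, get_uid_name

print(get_uid_name(0))      # typically 'root' on Linux/macOS; None on Windows
print(ROOT_USER_NAME)       # same value, computed once at import time
print(EFFECTIVE_USER_NAME)  # name of the current effective user ID, if known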
utilities/pydantic.py
ADDED
utilities/pydantic_settings.py
CHANGED

@@ -2,10 +2,12 @@ from __future__ import annotations
 
 from functools import reduce
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar, assert_never, override
+from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
 
+from pydantic import Field, create_model
 from pydantic_settings import (
     BaseSettings,
+    CliSettingsSource,
     JsonConfigSettingsSource,
     PydanticBaseSettingsSource,
     SettingsConfigDict,
@@ -14,6 +16,7 @@ from pydantic_settings import (
 )
 from pydantic_settings.sources import DEFAULT_PATH
 
+from utilities.errors import ImpossibleCaseError
 from utilities.iterables import always_iterable
 
 if TYPE_CHECKING:
@@ -29,7 +32,7 @@ type PathLikeOrWithSection = PathLike | PathLikeWithSection
 
 
 class CustomBaseSettings(BaseSettings):
-    """Base settings for loading JSON files."""
+    """Base settings for loading JSON/TOML/YAML files."""
 
     # paths
     json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
@@ -38,7 +41,7 @@ class CustomBaseSettings(BaseSettings):
 
     # config
     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
-        env_nested_delimiter="__"
+        frozen=True, env_nested_delimiter="__"
     )
 
     @classmethod
@@ -76,11 +79,6 @@ class CustomBaseSettings(BaseSettings):
     )
 
 
-def load_settings[T: BaseSettings](cls: type[T], /) -> T:
-    """Load a set of settings."""
-    return cls()
-
-
 class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
     @override
     def __init__(
@@ -158,8 +156,83 @@ def _get_section(
     return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
 
 
+##
+
+
+class HashableBaseSettings(BaseSettings):
+    """Base settings which are hashable."""
+
+    # config
+    model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
+
+
+##
+
+
+def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
+    """Load a set of settings."""
+    _ = cls.model_rebuild()
+    if cli:
+        cls_with_defaults = _load_settings_create_model(cls)
+
+        @classmethod
+        def settings_customise_sources(
+            cls: type[BaseSettings],
+            settings_cls: type[BaseSettings],
+            init_settings: PydanticBaseSettingsSource,
+            env_settings: PydanticBaseSettingsSource,
+            dotenv_settings: PydanticBaseSettingsSource,
+            file_secret_settings: PydanticBaseSettingsSource,
+        ) -> tuple[PydanticBaseSettingsSource, ...]:
+            parent = cast(
+                "Any", super(cls_with_defaults, cls)
+            ).settings_customise_sources(
+                settings_cls=settings_cls,
+                init_settings=init_settings,
+                env_settings=env_settings,
+                dotenv_settings=dotenv_settings,
+                file_secret_settings=file_secret_settings,
+            )
+            return (
+                CliSettingsSource(
+                    settings_cls, cli_parse_args=True, case_sensitive=False
+                ),
+                *parent,
+            )
+
+        cls_use = type(
+            cls.__name__,
+            (cls_with_defaults,),
+            {"settings_customise_sources": settings_customise_sources},
+        )
+        cls_use = cast("type[T]", cls_use)
+    else:
+        cls_use = cls
+    return cls_use()
+
+
+def _load_settings_create_model[T: BaseSettings](
+    cls: type[T], /, *, values: T | None = None
+) -> type[T]:
+    values_use = cls() if values is None else values
+    kwargs: dict[str, Any] = {}
+    for name, field in cls.model_fields.items():
+        if (ann := field.annotation) is None:
+            raise ImpossibleCaseError(case=[f"{ann=}"])  # pragma: no cover
+        value = getattr(values_use, name)
+        if (
+            isinstance(cast("Any", ann), type)  # 'ann' is possibly not a type
+            and issubclass(ann, BaseSettings)
+        ):
+            kwargs[name] = _load_settings_create_model(ann, values=value)
+        else:
+            kwargs[name] = (field.annotation, Field(default=value))
+    return create_model(cls.__name__, __base__=cls, **kwargs)
+
+
 __all__ = [
     "CustomBaseSettings",
+    "HashableBaseSettings",
     "JsonConfigSectionSettingsSource",
     "TomlConfigSectionSettingsSource",
     "YamlConfigSectionSettingsSource",
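With `cli=True`, the reworked `load_settings` first instantiates the class so that env/file values become per-field defaults (via `_load_settings_create_model`), then rebuilds the class with a `CliSettingsSource` placed first, i.e. with the highest precedence. A hedged usage sketch (`DbSettings` is illustrative, not part of the package):

from pydantic_settings import BaseSettings

from utilities.pydantic_settings import load_settings

class DbSettings(BaseSettings):  # hypothetical example class
    host: str = "localhost"
    port: int = 5432

settings = load_settings(DbSettings)            # env vars / defaults only
settings = load_settings(DbSettings, cli=True)  # --host/--port also override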
utilities/pydantic_settings_sops.py
CHANGED

@@ -1,5 +1,7 @@
 from __future__ import annotations
 
+from logging import Filter, LogRecord, getLogger
+from re import search
 from typing import TYPE_CHECKING, Any, ClassVar, override
 
 from pydantic_settings.sources import DEFAULT_PATH
@@ -21,6 +23,17 @@ if TYPE_CHECKING:
     from utilities.types import MaybeSequenceStr
 
 
+class _SuppressDefaultConfigMessage(Filter):
+    @override
+    def filter(self, record: LogRecord) -> bool:
+        return not search(
+            r"^default config file does not exists '.*'$", record.getMessage()
+        )
+
+
+getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
+
+
 class SopsBaseSettings(CustomBaseSettings):
     """Base settings for loading secrets using `sops/age`."""
 
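The module-level filter silences only sopsy's "default config file does not exists" warning; anything else logged on the `sopsy.utils` logger still passes through. For example, once `utilities.pydantic_settings_sops` has been imported:

from logging import getLogger

log = getLogger("sopsy.utils")
log.warning("default config file does not exists '/etc/sops.yaml'")  # suppressed
log.warning("failed to decrypt secrets")                             # still emitted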
utilities/pytest.py
CHANGED

@@ -1,10 +1,13 @@
 from __future__ import annotations
 
+from collections.abc import Callable
 from dataclasses import dataclass
 from functools import partial, wraps
 from inspect import iscoroutinefunction
 from os import environ
 from pathlib import Path
+from re import sub
+from types import FunctionType
 from typing import TYPE_CHECKING, Any, assert_never, cast, override
 
 from whenever import ZonedDateTime
@@ -20,44 +23,57 @@ from utilities.pathlib import (
     get_tail,
     module_path,
 )
-from utilities.platform import (
-    IS_LINUX,
-    IS_MAC,
-    IS_NOT_LINUX,
-    IS_NOT_MAC,
-    IS_NOT_WINDOWS,
-    IS_WINDOWS,
-)
+from utilities.platform import IS_LINUX, IS_MAC, IS_NOT_LINUX, IS_NOT_MAC
 from utilities.random import bernoulli
 from utilities.text import to_bool
 from utilities.types import MaybeCallableBoolLike, MaybeCoro, Seed
 from utilities.whenever import SECOND, get_now_local
 
 if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable
-
-    from utilities.types import Coro, Delta, PathLike
+    from collections.abc import Iterable
 
-try:  # WARNING: this package cannot use unguarded `pytest` imports
     from _pytest.config import Config
     from _pytest.config.argparsing import Parser
     from _pytest.python import Function
-
+
+    from utilities.types import Coro, Delta, PathLike
+
+
+IS_CI = "CI" in environ
+IS_CI_AND_NOT_LINUX = IS_CI and IS_NOT_LINUX
+
+
+try:  # WARNING: this package cannot use unguarded `pytest` imports
+    from pytest import mark
 except ModuleNotFoundError:  # pragma: no cover
-    from typing import Any as Config
-    from typing import Any as Function
-    from typing import Any as Parser
 
-
-
-
+    def skipif_ci[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_mac[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_linux[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_not_mac[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_not_linux[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_ci_and_not_linux[F: Callable](func: F) -> F:
+        return func
+
 else:
-    skipif_windows = mark.skipif(IS_WINDOWS, reason="Skipped for Windows")
+    skipif_ci = mark.skipif(IS_CI, reason="Skipped for CI")
     skipif_mac = mark.skipif(IS_MAC, reason="Skipped for Mac")
     skipif_linux = mark.skipif(IS_LINUX, reason="Skipped for Linux")
-    skipif_not_windows = mark.skipif(IS_NOT_WINDOWS, reason="Skipped for non-Windows")
     skipif_not_mac = mark.skipif(IS_NOT_MAC, reason="Skipped for non-Mac")
     skipif_not_linux = mark.skipif(IS_NOT_LINUX, reason="Skipped for non-Linux")
+    skipif_ci_and_not_linux = mark.skipif(
+        IS_CI_AND_NOT_LINUX, reason="Skipped for CI/non-Linux"
+    )
 
 
 def add_pytest_addoption(parser: Parser, options: list[str], /) -> None:
@@ -90,6 +106,8 @@ def add_pytest_collection_modifyitems(
         def pytest_collection_modifyitems(config, items):
             add_pytest_collection_modifyitems(config, items, ["slow"])
     """
+    from pytest import mark
+
     options = list(options)
     missing = {opt for opt in options if not config.getoption(f"--{opt}")}
     for item in items:
@@ -119,6 +137,15 @@ def add_pytest_configure(config: Config, options: Iterable[tuple[str, str]], /)
 ##
 
 
+def make_ids(obj: Any, /) -> str:
+    if isinstance(obj, FunctionType):
+        return sub(r"\s+at +0x[0-9a-fA-F]+", "", repr(obj))
+    return repr(obj)
+
+
+##
+
+
 def node_id_path(
     node_id: str, /, *, root: PathLike | None = None, suffix: str | None = None
 ) -> Path:
@@ -217,12 +244,12 @@ def _skipif_frac(
     frac: float = 0.5,
     seed: Seed | None = None,
 ) -> None:
-
-
+    from pytest import skip
+
     if ((predicate is None) or to_bool(predicate)) and bernoulli(
         true=1 - frac, seed=seed
     ):
-
+        skip(reason=f"{_get_name()} skipped (run {frac:.0%})")
 
 
 ##
@@ -291,15 +318,15 @@ def _throttle_inner[F: Callable[..., MaybeCoro[None]]](
 
 
 def _skipif_recent(*, root: PathLike | None = None, delta: Delta = SECOND) -> None:
-
-
+    from pytest import skip
+
     path = _get_path(root)
     try:
         contents = path.read_text()
     except FileNotFoundError:
         return
     try:
-        last = ZonedDateTime.parse_common_iso(contents)
+        last = ZonedDateTime.parse_iso(contents)
     except ValueError:
         return
     now = get_now_local()
@@ -328,21 +355,24 @@ def _get_name() -> str:
 def _write(root: PathLike | None = None, /) -> None:
     path = _get_path(root)
     with writer(path, overwrite=True) as temp:
-        _ = temp.write_text(get_now_local().format_common_iso())
+        _ = temp.write_text(get_now_local().format_iso())
 
 
 __all__ = [
+    "IS_CI",
+    "IS_CI_AND_NOT_LINUX",
     "NodeIdToPathError",
     "add_pytest_addoption",
     "add_pytest_collection_modifyitems",
     "add_pytest_configure",
+    "make_ids",
     "node_id_path",
     "run_frac",
+    "skipif_ci",
+    "skipif_ci_and_not_linux",
     "skipif_linux",
     "skipif_mac",
     "skipif_not_linux",
     "skipif_not_mac",
-    "skipif_not_windows",
-    "skipif_windows",
     "throttle",
 ]
utilities/pytest_regressions.py
CHANGED

@@ -1,15 +1,17 @@
 from __future__ import annotations
 
 from contextlib import suppress
+from dataclasses import dataclass
 from json import loads
 from pathlib import Path
 from shutil import copytree
-from typing import TYPE_CHECKING, Any, assert_never
+from typing import TYPE_CHECKING, Any, assert_never, override
 
 from pytest_regressions.file_regression import FileRegressionFixture
 
 from utilities.functions import ensure_str
 from utilities.operator import is_equal
+from utilities.reprlib import get_repr
 
 if TYPE_CHECKING:
     from polars import DataFrame, Series
@@ -70,10 +72,28 @@ class OrjsonRegressionFixture:
             check_fn=self._check_fn,
         )
 
-    def _check_fn(self,
-
-
-
+    def _check_fn(self, path_obtained: Path, path_existing: Path, /) -> None:
+        obtained = loads(path_obtained.read_text())
+        existing = loads(path_existing.read_text())
+        if not is_equal(obtained, existing):
+            raise OrjsonRegressionError(
+                path_obtained=path_obtained,
+                path_existing=path_existing,
+                obtained=obtained,
+                existing=existing,
+            )
+
+
+@dataclass(kw_only=True, slots=True)
+class OrjsonRegressionError(Exception):
+    path_obtained: Path
+    path_existing: Path
+    obtained: Any
+    existing: Any
+
+    @override
+    def __str__(self) -> str:
+        return f"Obtained object (at {str(self.path_obtained)!r}) and existing object (at {str(self.path_existing)!r}) differ; got {get_repr(self.obtained)} and {get_repr(self.existing)}"
 
 
 ##
@@ -97,7 +117,6 @@ class PolarsRegressionFixture:
             "describe": obj.describe(percentiles=[i / 10 for i in range(1, 10)]).rows(
                 named=True
             ),
-            "estimated_size": obj.estimated_size(),
             "is_empty": obj.is_empty(),
             "n_unique": obj.n_unique(),
         }
@@ -115,7 +134,7 @@ class PolarsRegressionFixture:
                     col(column).approx_n_unique()
                 ).item()
                 data["approx_n_unique"] = approx_n_unique
-                data["glimpse"] = df.glimpse(return_as_string=True)
+                data["glimpse"] = df.glimpse(return_type="string")
                 data["null_count"] = df.null_count().row(0, named=True)
             case never:
                 assert_never(never)
utilities/shutil.py
ADDED

@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+import shutil
+from dataclasses import dataclass
+from pathlib import Path
+from typing import override
+
+
+def which(cmd: str, /) -> Path:
+    path = shutil.which(cmd)
+    if path is None:
+        raise WhichError(cmd=cmd)
+    return Path(path)
+
+
+@dataclass(kw_only=True, slots=True)
+class WhichError(Exception):
+    cmd: str
+
+    @override
+    def __str__(self) -> str:
+        return f"{self.cmd!r} not found"
+
+
+__all__ = ["WhichError", "which"]
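Unlike `shutil.which`, this wrapper returns a `Path` and raises instead of returning None:

from utilities.shutil import WhichError, which

print(which("python3"))  # e.g. /usr/bin/python3
try:
    which("no-such-command")
except WhichError as err:
    print(err)  # 'no-such-command' not found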
utilities/sqlalchemy.py
CHANGED

@@ -331,6 +331,20 @@ async def ensure_database_dropped(super_: URL, database: str, /) -> None:
         _ = await conn.execute(text(f"DROP DATABASE IF EXISTS {database}"))
 
 
+async def ensure_database_users_disconnected(super_: URL, database: str, /) -> None:
+    """Ensure a database's users are disconnected."""
+    engine = create_async_engine(super_, isolation_level="AUTOCOMMIT")
+    match dialect := _get_dialect(engine):
+        case "postgresql":  # skipif-ci-and-not-linux
+            query = f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = {database!r} AND pid <> pg_backend_pid()"  # noqa: S608
+        case "mssql" | "mysql" | "oracle" | "sqlite":  # pragma: no cover
+            raise NotImplementedError(dialect)
+        case never:
+            assert_never(never)
+    async with engine.begin() as conn:
+        _ = await conn.execute(text(query))
+
+
 ##
 
 
@@ -1166,6 +1180,7 @@ __all__ = [
     "create_engine",
     "ensure_database_created",
     "ensure_database_dropped",
+    "ensure_database_users_disconnected",
     "ensure_tables_created",
     "ensure_tables_dropped",
     "enum_name",