dycw-utilities 0.135.0__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dycw-utilities might be problematic.

Files changed (97)
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.178.1.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +13 -10
  7. utilities/asyncio.py +312 -787
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +195 -77
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +28 -59
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +24 -269
  23. utilities/git.py +9 -30
  24. utilities/grp.py +28 -0
  25. utilities/gzip.py +31 -0
  26. utilities/http.py +3 -2
  27. utilities/hypothesis.py +513 -159
  28. utilities/importlib.py +17 -1
  29. utilities/inflect.py +12 -4
  30. utilities/iterables.py +33 -58
  31. utilities/jinja2.py +148 -0
  32. utilities/json.py +70 -0
  33. utilities/libcst.py +38 -17
  34. utilities/lightweight_charts.py +4 -7
  35. utilities/logging.py +136 -93
  36. utilities/math.py +8 -4
  37. utilities/more_itertools.py +43 -45
  38. utilities/operator.py +27 -27
  39. utilities/orjson.py +189 -36
  40. utilities/os.py +61 -4
  41. utilities/packaging.py +115 -0
  42. utilities/parse.py +8 -5
  43. utilities/pathlib.py +269 -40
  44. utilities/permissions.py +298 -0
  45. utilities/platform.py +7 -6
  46. utilities/polars.py +1205 -413
  47. utilities/polars_ols.py +1 -1
  48. utilities/postgres.py +408 -0
  49. utilities/pottery.py +43 -19
  50. utilities/pqdm.py +3 -3
  51. utilities/psutil.py +5 -57
  52. utilities/pwd.py +28 -0
  53. utilities/pydantic.py +4 -52
  54. utilities/pydantic_settings.py +240 -0
  55. utilities/pydantic_settings_sops.py +76 -0
  56. utilities/pyinstrument.py +7 -7
  57. utilities/pytest.py +104 -143
  58. utilities/pytest_plugins/__init__.py +1 -0
  59. utilities/pytest_plugins/pytest_randomly.py +23 -0
  60. utilities/pytest_plugins/pytest_regressions.py +56 -0
  61. utilities/pytest_regressions.py +26 -46
  62. utilities/random.py +11 -6
  63. utilities/re.py +1 -1
  64. utilities/redis.py +220 -343
  65. utilities/sentinel.py +10 -0
  66. utilities/shelve.py +4 -1
  67. utilities/shutil.py +25 -0
  68. utilities/slack_sdk.py +35 -104
  69. utilities/sqlalchemy.py +496 -471
  70. utilities/sqlalchemy_polars.py +29 -54
  71. utilities/string.py +2 -3
  72. utilities/subprocess.py +1977 -0
  73. utilities/tempfile.py +112 -4
  74. utilities/testbook.py +50 -0
  75. utilities/text.py +174 -42
  76. utilities/throttle.py +158 -0
  77. utilities/timer.py +2 -2
  78. utilities/traceback.py +70 -35
  79. utilities/types.py +102 -30
  80. utilities/typing.py +479 -19
  81. utilities/uuid.py +42 -5
  82. utilities/version.py +27 -26
  83. utilities/whenever.py +1559 -361
  84. utilities/zoneinfo.py +80 -22
  85. dycw_utilities-0.135.0.dist-info/METADATA +0 -39
  86. dycw_utilities-0.135.0.dist-info/RECORD +0 -96
  87. dycw_utilities-0.135.0.dist-info/WHEEL +0 -4
  88. dycw_utilities-0.135.0.dist-info/licenses/LICENSE +0 -21
  89. utilities/aiolimiter.py +0 -25
  90. utilities/arq.py +0 -216
  91. utilities/eventkit.py +0 -388
  92. utilities/luigi.py +0 -183
  93. utilities/period.py +0 -152
  94. utilities/pudb.py +0 -62
  95. utilities/python_dotenv.py +0 -101
  96. utilities/streamlit.py +0 -105
  97. utilities/typed_settings.py +0 -123
utilities/pydantic.py CHANGED
@@ -1,59 +1,11 @@
  from __future__ import annotations
 
- from dataclasses import dataclass
  from pathlib import Path
- from typing import TYPE_CHECKING, override
+ from typing import Annotated
 
- from pydantic import BaseModel
+ from pydantic import BeforeValidator
 
- from utilities.atomicwrites import writer
+ ExpandedPath = Annotated[Path, BeforeValidator(lambda p: Path(p).expanduser())]
 
- if TYPE_CHECKING:
-     from utilities.types import PathLike
 
-
- class HashableBaseModel(BaseModel):
-     """Subclass of BaseModel which is hashable."""
-
-     @override
-     def __hash__(self) -> int:
-         return hash((type(self), *self.__dict__.values()))
-
-
- def load_model[T: BaseModel](model: type[T], path: PathLike, /) -> T:
-     path = Path(path)
-     try:
-         with path.open() as fh:
-             return model.model_validate_json(fh.read())
-     except FileNotFoundError:
-         raise _LoadModelFileNotFoundError(model=model, path=path) from None
-     except IsADirectoryError:  # skipif-not-windows
-         raise _LoadModelIsADirectoryError(model=model, path=path) from None
-
-
- @dataclass(kw_only=True, slots=True)
- class LoadModelError(Exception):
-     model: type[BaseModel]
-     path: Path
-
-
- @dataclass(kw_only=True, slots=True)
- class _LoadModelFileNotFoundError(LoadModelError):
-     @override
-     def __str__(self) -> str:
-         return f"Unable to load {self.model}; path {str(self.path)!r} must exist."
-
-
- @dataclass(kw_only=True, slots=True)
- class _LoadModelIsADirectoryError(LoadModelError):
-     @override
-     def __str__(self) -> str:
-         return f"Unable to load {self.model}; path {str(self.path)!r} must not be a directory."  # skipif-not-windows
-
-
- def save_model(model: BaseModel, path: PathLike, /, *, overwrite: bool = False) -> None:
-     with writer(path, overwrite=overwrite) as temp, temp.open(mode="w") as fh:
-         _ = fh.write(model.model_dump_json())
-
-
- __all__ = ["HashableBaseModel", "LoadModelError", "load_model", "save_model"]
+ __all__ = ["ExpandedPath"]
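
The module now exposes a single helper: `ExpandedPath` is a `Path` annotated with a `BeforeValidator` that calls `expanduser()` on the incoming value before validation. A minimal usage sketch (not part of the diff; the model and field names are illustrative):

from pydantic import BaseModel

from utilities.pydantic import ExpandedPath


class Config(BaseModel):
    # "~" is expanded before Path validation runs
    data_dir: ExpandedPath


print(Config(data_dir="~/data").data_dir)  # e.g. /home/user/data
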
utilities/pydantic_settings.py ADDED
@@ -0,0 +1,240 @@
+ from __future__ import annotations
+
+ from functools import reduce
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
+
+ from pydantic import Field, create_model
+ from pydantic_settings import (
+     BaseSettings,
+     CliSettingsSource,
+     JsonConfigSettingsSource,
+     PydanticBaseSettingsSource,
+     SettingsConfigDict,
+     TomlConfigSettingsSource,
+     YamlConfigSettingsSource,
+ )
+ from pydantic_settings.sources import DEFAULT_PATH
+
+ from utilities.errors import ImpossibleCaseError
+ from utilities.iterables import always_iterable
+
+ if TYPE_CHECKING:
+     from collections.abc import Iterator, Sequence
+
+     from pydantic_settings.sources import PathType
+
+     from utilities.types import MaybeSequenceStr, PathLike
+
+
+ type PathLikeWithSection = tuple[PathLike, MaybeSequenceStr]
+ type PathLikeOrWithSection = PathLike | PathLikeWithSection
+
+
+ class CustomBaseSettings(BaseSettings):
+     """Base settings for loading JSON/TOML/YAML files."""
+
+     # paths
+     json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+     toml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+     yaml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+
+     # config
+     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
+         frozen=True, env_nested_delimiter="__"
+     )
+
+     @classmethod
+     @override
+     def settings_customise_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         init_settings: PydanticBaseSettingsSource,
+         env_settings: PydanticBaseSettingsSource,
+         dotenv_settings: PydanticBaseSettingsSource,
+         file_secret_settings: PydanticBaseSettingsSource,
+     ) -> tuple[PydanticBaseSettingsSource, ...]:
+         _ = (init_settings, dotenv_settings, file_secret_settings)
+         return tuple(cls._yield_base_settings_sources(settings_cls, env_settings))
+
+     @classmethod
+     def _yield_base_settings_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         env_settings: PydanticBaseSettingsSource,
+         /,
+     ) -> Iterator[PydanticBaseSettingsSource]:
+         yield env_settings
+         for file, section in map(_ensure_section, cls.json_files):
+             yield JsonConfigSectionSettingsSource(
+                 settings_cls, json_file=file, section=section
+             )
+         for file, section in map(_ensure_section, cls.toml_files):
+             yield TomlConfigSectionSettingsSource(
+                 settings_cls, toml_file=file, section=section
+             )
+         for file, section in map(_ensure_section, cls.yaml_files):
+             yield YamlConfigSectionSettingsSource(
+                 settings_cls, yaml_file=file, section=section
+             )
+
+
+ class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         json_file: PathType | None = DEFAULT_PATH,
+         json_file_encoding: str | None = None,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(
+             settings_cls, json_file=json_file, json_file_encoding=json_file_encoding
+         )
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ class TomlConfigSectionSettingsSource(TomlConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         toml_file: PathType | None = DEFAULT_PATH,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(settings_cls, toml_file=toml_file)
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ class YamlConfigSectionSettingsSource(YamlConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         yaml_file: PathType | None = DEFAULT_PATH,
+         yaml_file_encoding: str | None = None,
+         yaml_config_section: str | None = None,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(
+             settings_cls,
+             yaml_file=yaml_file,
+             yaml_file_encoding=yaml_file_encoding,
+             yaml_config_section=yaml_config_section,
+         )
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ def _ensure_section(file: PathLikeOrWithSection, /) -> PathLikeWithSection:
+     match file:
+         case Path() | str():
+             return file, []
+         case Path() | str() as path, str() | list() | tuple() as section:
+             return path, section
+         case never:
+             assert_never(never)
+
+
+ def _get_section(
+     mapping: dict[str, Any], section: MaybeSequenceStr, /
+ ) -> dict[str, Any]:
+     return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
+
+
+ ##
+
+
+ class HashableBaseSettings(BaseSettings):
+     """Base settings for loading JSON files."""
+
+     # config
+     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
+
+
+ ##
+
+
+ def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
+     """Load a set of settings."""
+     _ = cls.model_rebuild()
+     if cli:
+         cls_with_defaults = _load_settings_create_model(cls)
+
+         @classmethod
+         def settings_customise_sources(
+             cls: type[BaseSettings],
+             settings_cls: type[BaseSettings],
+             init_settings: PydanticBaseSettingsSource,
+             env_settings: PydanticBaseSettingsSource,
+             dotenv_settings: PydanticBaseSettingsSource,
+             file_secret_settings: PydanticBaseSettingsSource,
+         ) -> tuple[PydanticBaseSettingsSource, ...]:
+             parent = cast(
+                 "Any", super(cls_with_defaults, cls)
+             ).settings_customise_sources(
+                 settings_cls=settings_cls,
+                 init_settings=init_settings,
+                 env_settings=env_settings,
+                 dotenv_settings=dotenv_settings,
+                 file_secret_settings=file_secret_settings,
+             )
+             return (
+                 CliSettingsSource(
+                     settings_cls, cli_parse_args=True, case_sensitive=False
+                 ),
+                 *parent,
+             )
+
+         cls_use = type(
+             cls.__name__,
+             (cls_with_defaults,),
+             {"settings_customise_sources": settings_customise_sources},
+         )
+         cls_use = cast("type[T]", cls_use)
+     else:
+         cls_use = cls
+     return cls_use()
+
+
+ def _load_settings_create_model[T: BaseSettings](
+     cls: type[T], /, *, values: T | None = None
+ ) -> type[T]:
+     values_use = cls() if values is None else values
+     kwargs: dict[str, Any] = {}
+     for name, field in cls.model_fields.items():
+         if (ann := field.annotation) is None:
+             raise ImpossibleCaseError(case=[f"{ann=}"])  # pragma: no cover
+         value = getattr(values_use, name)
+         if (
+             isinstance(cast("Any", ann), type)  # 'ann' is possible not a type
+             and issubclass(ann, BaseSettings)
+         ):
+             kwargs[name] = _load_settings_create_model(ann, values=value)
+         else:
+             kwargs[name] = (field.annotation, Field(default=value))
+     return create_model(cls.__name__, __base__=cls, **kwargs)
+
+
+ __all__ = [
+     "CustomBaseSettings",
+     "HashableBaseSettings",
+     "JsonConfigSectionSettingsSource",
+     "TomlConfigSectionSettingsSource",
+     "YamlConfigSectionSettingsSource",
+     "load_settings",
+ ]
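
The new `CustomBaseSettings` reads environment variables first, then any configured JSON/TOML/YAML files, where each entry may be a bare path or a `(path, section)` pair; `load_settings(..., cli=True)` additionally builds a model whose defaults are the already-loaded values and puts a `CliSettingsSource` in front of the other sources. A hedged usage sketch (file name, section and fields are illustrative, not taken from the package):

from utilities.pydantic_settings import CustomBaseSettings, load_settings


class DbSettings(CustomBaseSettings):
    # values come from the [app.db] table of settings.toml,
    # with environment variables taking precedence
    toml_files = [("settings.toml", ["app", "db"])]

    host: str = "localhost"
    port: int = 5432


settings = load_settings(DbSettings)            # env vars + files
settings = load_settings(DbSettings, cli=True)  # also parse --host/--port from argv
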
utilities/pydantic_settings_sops.py ADDED
@@ -0,0 +1,76 @@
+ from __future__ import annotations
+
+ from logging import Filter, LogRecord, getLogger
+ from re import search
+ from typing import TYPE_CHECKING, Any, ClassVar, override
+
+ from pydantic_settings.sources import DEFAULT_PATH
+ from pydantic_settings_sops import SOPSConfigSettingsSource
+
+ from utilities.pydantic_settings import (
+     CustomBaseSettings,
+     PathLikeOrWithSection,
+     _ensure_section,
+     _get_section,
+ )
+
+ if TYPE_CHECKING:
+     from collections.abc import Iterator, Sequence
+
+     from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
+     from pydantic_settings.sources import PathType
+
+     from utilities.types import MaybeSequenceStr
+
+
+ class _SuppressDefaultConfigMessage(Filter):
+     @override
+     def filter(self, record: LogRecord) -> bool:
+         return not search(
+             r"^default config file does not exists '.*'$", record.getMessage()
+         )
+
+
+ getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
+
+
+ class SopsBaseSettings(CustomBaseSettings):
+     """Base settings for loading secrets using `sops/age`."""
+
+     # paths
+     secret_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+
+     @classmethod
+     @override
+     def _yield_base_settings_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         env_settings: PydanticBaseSettingsSource,
+         /,
+     ) -> Iterator[PydanticBaseSettingsSource]:
+         yield from super()._yield_base_settings_sources(settings_cls, env_settings)
+         for file, section in map(_ensure_section, cls.secret_files):
+             yield SOPSConfigSectionSettingsSource(
+                 settings_cls, json_file=file, section=section
+             )
+
+
+ class SOPSConfigSectionSettingsSource(SOPSConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         json_file: PathType | None = DEFAULT_PATH,
+         yaml_file: PathType | None = DEFAULT_PATH,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(settings_cls, json_file=json_file, yaml_file=yaml_file)  # pyright: ignore[reportArgumentType]
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ __all__ = ["SOPSConfigSectionSettingsSource", "SopsBaseSettings"]
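
`SopsBaseSettings` builds on `CustomBaseSettings` and appends a `SOPSConfigSectionSettingsSource` for each entry in `secret_files`, so sops/age-encrypted files are decrypted and read like any other config file; a logging filter hides sopsy's "default config file does not exists" message. A hedged sketch (the secrets file and field are made up):

from utilities.pydantic_settings_sops import SopsBaseSettings


class Secrets(SopsBaseSettings):
    # decrypted via sops/age, then the "db" section is selected
    secret_files = [("secrets.enc.json", "db")]

    password: str


secrets = Secrets()
print(secrets.password)
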
utilities/pyinstrument.py CHANGED
@@ -7,8 +7,8 @@ from typing import TYPE_CHECKING
  from pyinstrument.profiler import Profiler
 
  from utilities.atomicwrites import writer
- from utilities.pathlib import get_path
- from utilities.whenever import format_compact, get_now
+ from utilities.pathlib import to_path
+ from utilities.whenever import format_compact, get_now_local
 
  if TYPE_CHECKING:
      from collections.abc import Iterator
@@ -17,15 +17,15 @@ if TYPE_CHECKING:
 
 
  @contextmanager
- def profile(*, path: MaybeCallablePathLike | None = Path.cwd) -> Iterator[None]:
+ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
      """Profile the contents of a block."""
      with Profiler() as profiler:
          yield
-     filename = get_path(path=path).joinpath(
-         f"profile__{format_compact(get_now())}.html"
+     filename = to_path(path).joinpath(
+         f"profile__{format_compact(get_now_local(), path=True)}.html"
      )
-     with writer(filename) as temp, temp.open(mode="w") as fh:
-         _ = fh.write(profiler.output_html())
+     with writer(filename) as temp:
+         _ = temp.write_text(profiler.output_html())
 
 
  __all__ = ["profile"]
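
`profile` now takes the output directory as a positional-only argument (defaulting to `Path.cwd`) and stamps the report with the local time in a path-safe format. A minimal sketch of the new call (the profiled body is illustrative):

from utilities.pyinstrument import profile

with profile():  # writes ./profile__<timestamp>.html on exit
    total = sum(i * i for i in range(1_000_000))
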