dycw-utilities 0.146.2__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dycw-utilities might be problematic. Click here for more details.

Files changed (89) hide show
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. {dycw_utilities-0.146.2.dist-info → dycw_utilities-0.178.1.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +10 -7
  7. utilities/asyncio.py +129 -50
  8. utilities/atomicwrites.py +1 -1
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +144 -49
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +4 -2
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +8 -3
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +20 -297
  23. utilities/git.py +19 -0
  24. utilities/grp.py +28 -0
  25. utilities/hypothesis.py +361 -79
  26. utilities/importlib.py +17 -1
  27. utilities/inflect.py +1 -1
  28. utilities/iterables.py +33 -58
  29. utilities/jinja2.py +148 -0
  30. utilities/json.py +1 -1
  31. utilities/libcst.py +7 -7
  32. utilities/logging.py +131 -93
  33. utilities/math.py +8 -4
  34. utilities/more_itertools.py +4 -6
  35. utilities/operator.py +1 -1
  36. utilities/orjson.py +86 -34
  37. utilities/os.py +49 -2
  38. utilities/packaging.py +115 -0
  39. utilities/parse.py +2 -2
  40. utilities/pathlib.py +66 -34
  41. utilities/permissions.py +298 -0
  42. utilities/platform.py +5 -4
  43. utilities/polars.py +934 -420
  44. utilities/polars_ols.py +1 -1
  45. utilities/postgres.py +317 -153
  46. utilities/pottery.py +10 -86
  47. utilities/pqdm.py +3 -3
  48. utilities/pwd.py +28 -0
  49. utilities/pydantic.py +4 -51
  50. utilities/pydantic_settings.py +240 -0
  51. utilities/pydantic_settings_sops.py +76 -0
  52. utilities/pyinstrument.py +5 -5
  53. utilities/pytest.py +100 -126
  54. utilities/pytest_plugins/pytest_randomly.py +1 -1
  55. utilities/pytest_plugins/pytest_regressions.py +7 -3
  56. utilities/pytest_regressions.py +27 -8
  57. utilities/random.py +11 -6
  58. utilities/re.py +1 -1
  59. utilities/redis.py +101 -64
  60. utilities/sentinel.py +10 -0
  61. utilities/shelve.py +4 -1
  62. utilities/shutil.py +25 -0
  63. utilities/slack_sdk.py +9 -4
  64. utilities/sqlalchemy.py +422 -352
  65. utilities/sqlalchemy_polars.py +28 -52
  66. utilities/string.py +1 -1
  67. utilities/subprocess.py +1977 -0
  68. utilities/tempfile.py +112 -4
  69. utilities/testbook.py +50 -0
  70. utilities/text.py +174 -42
  71. utilities/throttle.py +158 -0
  72. utilities/timer.py +2 -2
  73. utilities/traceback.py +59 -38
  74. utilities/types.py +68 -22
  75. utilities/typing.py +479 -19
  76. utilities/uuid.py +42 -5
  77. utilities/version.py +27 -26
  78. utilities/whenever.py +663 -178
  79. utilities/zoneinfo.py +80 -22
  80. dycw_utilities-0.146.2.dist-info/METADATA +0 -41
  81. dycw_utilities-0.146.2.dist-info/RECORD +0 -99
  82. dycw_utilities-0.146.2.dist-info/WHEEL +0 -4
  83. dycw_utilities-0.146.2.dist-info/licenses/LICENSE +0 -21
  84. utilities/aiolimiter.py +0 -25
  85. utilities/eventkit.py +0 -388
  86. utilities/period.py +0 -237
  87. utilities/python_dotenv.py +0 -101
  88. utilities/streamlit.py +0 -105
  89. utilities/typed_settings.py +0 -144
utilities/pottery.py CHANGED
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from contextlib import nullcontext, suppress
3
+ from contextlib import suppress
4
4
  from dataclasses import dataclass
5
5
  from sys import maxsize
6
6
  from typing import TYPE_CHECKING, override
@@ -9,23 +9,19 @@ from pottery import AIORedlock
9
9
  from pottery.exceptions import ReleaseUnlockedLock
10
10
  from redis.asyncio import Redis
11
11
 
12
- from utilities.asyncio import loop_until_succeed, sleep_td, timeout_td
12
+ from utilities.asyncio import sleep_td, timeout_td
13
13
  from utilities.contextlib import enhanced_async_context_manager
14
- from utilities.functools import partial
15
14
  from utilities.iterables import always_iterable
16
- from utilities.logging import get_logger
17
- from utilities.warnings import suppress_warnings
18
- from utilities.whenever import MILLISECOND, SECOND, to_seconds
15
+ from utilities.whenever import MILLISECOND, SECOND, to_nanoseconds
19
16
 
20
17
  if TYPE_CHECKING:
21
- from collections.abc import AsyncIterator, Callable, Iterable
18
+ from collections.abc import AsyncIterator, Iterable
22
19
 
23
20
  from whenever import Delta
24
21
 
25
- from utilities.types import Coro, LoggerOrName, MaybeIterable
22
+ from utilities.types import MaybeIterable
26
23
 
27
24
  _NUM: int = 1
28
- _TIMEOUT_TRY_ACQUIRE: Delta = SECOND
29
25
  _TIMEOUT_RELEASE: Delta = 10 * SECOND
30
26
  _SLEEP: Delta = MILLISECOND
31
27
 
@@ -44,74 +40,6 @@ async def extend_lock(
44
40
  ##
45
41
 
46
42
 
47
- @enhanced_async_context_manager
48
- async def try_yield_coroutine_looper(
49
- redis: MaybeIterable[Redis],
50
- key: str,
51
- /,
52
- *,
53
- num: int = _NUM,
54
- timeout_release: Delta = _TIMEOUT_RELEASE,
55
- num_extensions: int | None = None,
56
- timeout_acquire: Delta = _TIMEOUT_TRY_ACQUIRE,
57
- sleep_acquire: Delta = _SLEEP,
58
- throttle: Delta | None = None,
59
- logger: LoggerOrName | None = None,
60
- sleep_error: Delta | None = None,
61
- ) -> AsyncIterator[CoroutineLooper | None]:
62
- """Try acquire access to a coroutine looper."""
63
- try: # skipif-ci-and-not-linux
64
- async with yield_access(
65
- redis,
66
- key,
67
- num=num,
68
- timeout_release=timeout_release,
69
- num_extensions=num_extensions,
70
- timeout_acquire=timeout_acquire,
71
- sleep=sleep_acquire,
72
- throttle=throttle,
73
- ) as lock:
74
- yield CoroutineLooper(lock=lock, logger=logger, sleep=sleep_error)
75
- except _YieldAccessUnableToAcquireLockError as error: # skipif-ci-and-not-linux
76
- if logger is not None:
77
- get_logger(logger=logger).info("%s", error)
78
- async with nullcontext():
79
- yield
80
-
81
-
82
- @dataclass(order=True, unsafe_hash=True, kw_only=True)
83
- class CoroutineLooper:
84
- """Looper, guarded by a lock, to repeatedly call a coroutine until it succeeds."""
85
-
86
- lock: AIORedlock
87
- logger: LoggerOrName | None = None
88
- sleep: Delta | None = None
89
-
90
- async def __call__[**P](
91
- self, func: Callable[P, Coro[None]], *args: P.args, **kwargs: P.kwargs
92
- ) -> None:
93
- def make_coro() -> Coro[None]:
94
- return func(*args, **kwargs)
95
-
96
- await loop_until_succeed(
97
- make_coro, error=partial(self._error, func=make_coro), sleep=self.sleep
98
- )
99
-
100
- def _error(self, error: Exception, /, *, func: Callable[[], Coro[None]]) -> None:
101
- _ = error
102
- if self.logger is not None:
103
- coro = func()
104
- name = coro.__name__ # skipif-ci-and-not-linux
105
- with suppress_warnings(
106
- message="coroutine '.*' was never awaited", category=RuntimeWarning
107
- ):
108
- del coro
109
- get_logger(logger=self.logger).error("Error running %r", name)
110
-
111
-
112
- ##
113
-
114
-
115
43
  @enhanced_async_context_manager
116
44
  async def yield_access(
117
45
  redis: MaybeIterable[Redis],
@@ -135,7 +63,7 @@ async def yield_access(
135
63
  AIORedlock(
136
64
  key=f"{key}_{i}_of_{num}",
137
65
  masters=masters,
138
- auto_release_time=to_seconds(timeout_release),
66
+ auto_release_time=to_nanoseconds(timeout_release) / 1e9,
139
67
  num_extensions=maxsize if num_extensions is None else num_extensions,
140
68
  )
141
69
  for i in range(1, num + 1)
@@ -185,11 +113,12 @@ async def _get_first_available_lock_if_any(
185
113
  @dataclass(kw_only=True, slots=True)
186
114
  class YieldAccessError(Exception):
187
115
  key: str
188
- num: int
189
116
 
190
117
 
191
118
  @dataclass(kw_only=True, slots=True)
192
119
  class _YieldAccessNumLocksError(YieldAccessError):
120
+ num: int
121
+
193
122
  @override
194
123
  def __str__(self) -> str:
195
124
  return f"Number of locks for {self.key!r} must be positive; got {self.num}"
@@ -197,6 +126,7 @@ class _YieldAccessNumLocksError(YieldAccessError):
197
126
 
198
127
  @dataclass(kw_only=True, slots=True)
199
128
  class _YieldAccessUnableToAcquireLockError(YieldAccessError):
129
+ num: int
200
130
  timeout: Delta | None
201
131
 
202
132
  @override
@@ -204,10 +134,4 @@ class _YieldAccessUnableToAcquireLockError(YieldAccessError):
204
134
  return f"Unable to acquire any 1 of {self.num} locks for {self.key!r} after {self.timeout}" # skipif-ci-and-not-linux
205
135
 
206
136
 
207
- __all__ = [
208
- "CoroutineLooper",
209
- "YieldAccessError",
210
- "extend_lock",
211
- "try_yield_coroutine_looper",
212
- "yield_access",
213
- ]
137
+ __all__ = ["YieldAccessError", "extend_lock", "yield_access"]
utilities/pqdm.py CHANGED
@@ -9,7 +9,7 @@ from tqdm.auto import tqdm as tqdm_auto
9
9
  from utilities.functions import get_func_name
10
10
  from utilities.iterables import apply_to_varargs
11
11
  from utilities.os import get_cpu_use
12
- from utilities.sentinel import Sentinel, sentinel
12
+ from utilities.sentinel import Sentinel, is_sentinel, sentinel
13
13
 
14
14
  if TYPE_CHECKING:
15
15
  from collections.abc import Callable, Iterable
@@ -90,7 +90,7 @@ def pqdm_starmap[T](
90
90
  **_get_desc(desc, func),
91
91
  **kwargs,
92
92
  )
93
- case _ as never:
93
+ case never:
94
94
  assert_never(never)
95
95
  return list(result)
96
96
 
@@ -98,7 +98,7 @@ def pqdm_starmap[T](
98
98
  def _get_desc(
99
99
  desc: str | None | Sentinel, func: Callable[..., Any], /
100
100
  ) -> dict[str, str]:
101
- desc_use = get_func_name(func) if isinstance(desc, Sentinel) else desc
101
+ desc_use = get_func_name(func) if is_sentinel(desc) else desc
102
102
  return {} if desc_use is None else {"desc": desc_use}
103
103
 
104
104
 
utilities/pwd.py ADDED
@@ -0,0 +1,28 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import assert_never
4
+
5
+ from utilities.os import EFFECTIVE_USER_ID
6
+ from utilities.platform import SYSTEM
7
+
8
+
9
+ def get_uid_name(uid: int, /) -> str | None:
10
+ """Get the name of a user ID."""
11
+ match SYSTEM:
12
+ case "windows": # skipif-not-windows
13
+ return None
14
+ case "mac" | "linux": # skipif-windows
15
+ from pwd import getpwuid
16
+
17
+ return getpwuid(uid).pw_name
18
+ case never:
19
+ assert_never(never)
20
+
21
+
22
+ ROOT_USER_NAME = get_uid_name(0)
23
+ EFFECTIVE_USER_NAME = (
24
+ None if EFFECTIVE_USER_ID is None else get_uid_name(EFFECTIVE_USER_ID)
25
+ )
26
+
27
+
28
+ __all__ = ["EFFECTIVE_USER_NAME", "ROOT_USER_NAME", "get_uid_name"]
utilities/pydantic.py CHANGED
@@ -1,58 +1,11 @@
1
1
  from __future__ import annotations
2
2
 
3
- from dataclasses import dataclass
4
3
  from pathlib import Path
5
- from typing import TYPE_CHECKING, override
4
+ from typing import Annotated
6
5
 
7
- from pydantic import BaseModel
6
+ from pydantic import BeforeValidator
8
7
 
9
- from utilities.atomicwrites import writer
8
+ ExpandedPath = Annotated[Path, BeforeValidator(lambda p: Path(p).expanduser())]
10
9
 
11
- if TYPE_CHECKING:
12
- from utilities.types import PathLike
13
10
 
14
-
15
- class HashableBaseModel(BaseModel):
16
- """Subclass of BaseModel which is hashable."""
17
-
18
- @override
19
- def __hash__(self) -> int:
20
- return hash((type(self), *self.__dict__.values()))
21
-
22
-
23
- def load_model[T: BaseModel](model: type[T], path: PathLike, /) -> T:
24
- path = Path(path)
25
- try:
26
- return model.model_validate_json(path.read_text())
27
- except FileNotFoundError:
28
- raise _LoadModelFileNotFoundError(model=model, path=path) from None
29
- except IsADirectoryError: # skipif-not-windows
30
- raise _LoadModelIsADirectoryError(model=model, path=path) from None
31
-
32
-
33
- @dataclass(kw_only=True, slots=True)
34
- class LoadModelError(Exception):
35
- model: type[BaseModel]
36
- path: Path
37
-
38
-
39
- @dataclass(kw_only=True, slots=True)
40
- class _LoadModelFileNotFoundError(LoadModelError):
41
- @override
42
- def __str__(self) -> str:
43
- return f"Unable to load {self.model}; path {str(self.path)!r} must exist."
44
-
45
-
46
- @dataclass(kw_only=True, slots=True)
47
- class _LoadModelIsADirectoryError(LoadModelError):
48
- @override
49
- def __str__(self) -> str:
50
- return f"Unable to load {self.model}; path {str(self.path)!r} must not be a directory." # skipif-not-windows
51
-
52
-
53
- def save_model(model: BaseModel, path: PathLike, /, *, overwrite: bool = False) -> None:
54
- with writer(path, overwrite=overwrite) as temp:
55
- _ = temp.write_text(model.model_dump_json())
56
-
57
-
58
- __all__ = ["HashableBaseModel", "LoadModelError", "load_model", "save_model"]
11
+ __all__ = ["ExpandedPath"]
@@ -0,0 +1,240 @@
1
+ from __future__ import annotations
2
+
3
+ from functools import reduce
4
+ from pathlib import Path
5
+ from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
6
+
7
+ from pydantic import Field, create_model
8
+ from pydantic_settings import (
9
+ BaseSettings,
10
+ CliSettingsSource,
11
+ JsonConfigSettingsSource,
12
+ PydanticBaseSettingsSource,
13
+ SettingsConfigDict,
14
+ TomlConfigSettingsSource,
15
+ YamlConfigSettingsSource,
16
+ )
17
+ from pydantic_settings.sources import DEFAULT_PATH
18
+
19
+ from utilities.errors import ImpossibleCaseError
20
+ from utilities.iterables import always_iterable
21
+
22
+ if TYPE_CHECKING:
23
+ from collections.abc import Iterator, Sequence
24
+
25
+ from pydantic_settings.sources import PathType
26
+
27
+ from utilities.types import MaybeSequenceStr, PathLike
28
+
29
+
30
+ type PathLikeWithSection = tuple[PathLike, MaybeSequenceStr]
31
+ type PathLikeOrWithSection = PathLike | PathLikeWithSection
32
+
33
+
34
+ class CustomBaseSettings(BaseSettings):
35
+ """Base settings for loading JSON/TOML/YAML files."""
36
+
37
+ # paths
38
+ json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
39
+ toml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
40
+ yaml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
41
+
42
+ # config
43
+ model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
44
+ frozen=True, env_nested_delimiter="__"
45
+ )
46
+
47
+ @classmethod
48
+ @override
49
+ def settings_customise_sources(
50
+ cls,
51
+ settings_cls: type[BaseSettings],
52
+ init_settings: PydanticBaseSettingsSource,
53
+ env_settings: PydanticBaseSettingsSource,
54
+ dotenv_settings: PydanticBaseSettingsSource,
55
+ file_secret_settings: PydanticBaseSettingsSource,
56
+ ) -> tuple[PydanticBaseSettingsSource, ...]:
57
+ _ = (init_settings, dotenv_settings, file_secret_settings)
58
+ return tuple(cls._yield_base_settings_sources(settings_cls, env_settings))
59
+
60
+ @classmethod
61
+ def _yield_base_settings_sources(
62
+ cls,
63
+ settings_cls: type[BaseSettings],
64
+ env_settings: PydanticBaseSettingsSource,
65
+ /,
66
+ ) -> Iterator[PydanticBaseSettingsSource]:
67
+ yield env_settings
68
+ for file, section in map(_ensure_section, cls.json_files):
69
+ yield JsonConfigSectionSettingsSource(
70
+ settings_cls, json_file=file, section=section
71
+ )
72
+ for file, section in map(_ensure_section, cls.toml_files):
73
+ yield TomlConfigSectionSettingsSource(
74
+ settings_cls, toml_file=file, section=section
75
+ )
76
+ for file, section in map(_ensure_section, cls.yaml_files):
77
+ yield YamlConfigSectionSettingsSource(
78
+ settings_cls, yaml_file=file, section=section
79
+ )
80
+
81
+
82
+ class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
83
+ @override
84
+ def __init__(
85
+ self,
86
+ settings_cls: type[BaseSettings],
87
+ json_file: PathType | None = DEFAULT_PATH,
88
+ json_file_encoding: str | None = None,
89
+ *,
90
+ section: MaybeSequenceStr,
91
+ ) -> None:
92
+ super().__init__(
93
+ settings_cls, json_file=json_file, json_file_encoding=json_file_encoding
94
+ )
95
+ self.section = section
96
+
97
+ @override
98
+ def __call__(self) -> dict[str, Any]:
99
+ return _get_section(super().__call__(), self.section)
100
+
101
+
102
+ class TomlConfigSectionSettingsSource(TomlConfigSettingsSource):
103
+ @override
104
+ def __init__(
105
+ self,
106
+ settings_cls: type[BaseSettings],
107
+ toml_file: PathType | None = DEFAULT_PATH,
108
+ *,
109
+ section: MaybeSequenceStr,
110
+ ) -> None:
111
+ super().__init__(settings_cls, toml_file=toml_file)
112
+ self.section = section
113
+
114
+ @override
115
+ def __call__(self) -> dict[str, Any]:
116
+ return _get_section(super().__call__(), self.section)
117
+
118
+
119
+ class YamlConfigSectionSettingsSource(YamlConfigSettingsSource):
120
+ @override
121
+ def __init__(
122
+ self,
123
+ settings_cls: type[BaseSettings],
124
+ yaml_file: PathType | None = DEFAULT_PATH,
125
+ yaml_file_encoding: str | None = None,
126
+ yaml_config_section: str | None = None,
127
+ *,
128
+ section: MaybeSequenceStr,
129
+ ) -> None:
130
+ super().__init__(
131
+ settings_cls,
132
+ yaml_file=yaml_file,
133
+ yaml_file_encoding=yaml_file_encoding,
134
+ yaml_config_section=yaml_config_section,
135
+ )
136
+ self.section = section
137
+
138
+ @override
139
+ def __call__(self) -> dict[str, Any]:
140
+ return _get_section(super().__call__(), self.section)
141
+
142
+
143
+ def _ensure_section(file: PathLikeOrWithSection, /) -> PathLikeWithSection:
144
+ match file:
145
+ case Path() | str():
146
+ return file, []
147
+ case Path() | str() as path, str() | list() | tuple() as section:
148
+ return path, section
149
+ case never:
150
+ assert_never(never)
151
+
152
+
153
+ def _get_section(
154
+ mapping: dict[str, Any], section: MaybeSequenceStr, /
155
+ ) -> dict[str, Any]:
156
+ return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
157
+
158
+
159
+ ##
160
+
161
+
162
+ class HashableBaseSettings(BaseSettings):
163
+ """Base settings for loading JSON files."""
164
+
165
+ # config
166
+ model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
167
+
168
+
169
+ ##
170
+
171
+
172
+ def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
173
+ """Load a set of settings."""
174
+ _ = cls.model_rebuild()
175
+ if cli:
176
+ cls_with_defaults = _load_settings_create_model(cls)
177
+
178
+ @classmethod
179
+ def settings_customise_sources(
180
+ cls: type[BaseSettings],
181
+ settings_cls: type[BaseSettings],
182
+ init_settings: PydanticBaseSettingsSource,
183
+ env_settings: PydanticBaseSettingsSource,
184
+ dotenv_settings: PydanticBaseSettingsSource,
185
+ file_secret_settings: PydanticBaseSettingsSource,
186
+ ) -> tuple[PydanticBaseSettingsSource, ...]:
187
+ parent = cast(
188
+ "Any", super(cls_with_defaults, cls)
189
+ ).settings_customise_sources(
190
+ settings_cls=settings_cls,
191
+ init_settings=init_settings,
192
+ env_settings=env_settings,
193
+ dotenv_settings=dotenv_settings,
194
+ file_secret_settings=file_secret_settings,
195
+ )
196
+ return (
197
+ CliSettingsSource(
198
+ settings_cls, cli_parse_args=True, case_sensitive=False
199
+ ),
200
+ *parent,
201
+ )
202
+
203
+ cls_use = type(
204
+ cls.__name__,
205
+ (cls_with_defaults,),
206
+ {"settings_customise_sources": settings_customise_sources},
207
+ )
208
+ cls_use = cast("type[T]", cls_use)
209
+ else:
210
+ cls_use = cls
211
+ return cls_use()
212
+
213
+
214
+ def _load_settings_create_model[T: BaseSettings](
215
+ cls: type[T], /, *, values: T | None = None
216
+ ) -> type[T]:
217
+ values_use = cls() if values is None else values
218
+ kwargs: dict[str, Any] = {}
219
+ for name, field in cls.model_fields.items():
220
+ if (ann := field.annotation) is None:
221
+ raise ImpossibleCaseError(case=[f"{ann=}"]) # pragma: no cover
222
+ value = getattr(values_use, name)
223
+ if (
224
+ isinstance(cast("Any", ann), type)  # 'ann' is possibly not a type
225
+ and issubclass(ann, BaseSettings)
226
+ ):
227
+ kwargs[name] = _load_settings_create_model(ann, values=value)
228
+ else:
229
+ kwargs[name] = (field.annotation, Field(default=value))
230
+ return create_model(cls.__name__, __base__=cls, **kwargs)
231
+
232
+
233
+ __all__ = [
234
+ "CustomBaseSettings",
235
+ "HashableBaseSettings",
236
+ "JsonConfigSectionSettingsSource",
237
+ "TomlConfigSectionSettingsSource",
238
+ "YamlConfigSectionSettingsSource",
239
+ "load_settings",
240
+ ]
@@ -0,0 +1,76 @@
1
+ from __future__ import annotations
2
+
3
+ from logging import Filter, LogRecord, getLogger
4
+ from re import search
5
+ from typing import TYPE_CHECKING, Any, ClassVar, override
6
+
7
+ from pydantic_settings.sources import DEFAULT_PATH
8
+ from pydantic_settings_sops import SOPSConfigSettingsSource
9
+
10
+ from utilities.pydantic_settings import (
11
+ CustomBaseSettings,
12
+ PathLikeOrWithSection,
13
+ _ensure_section,
14
+ _get_section,
15
+ )
16
+
17
+ if TYPE_CHECKING:
18
+ from collections.abc import Iterator, Sequence
19
+
20
+ from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
21
+ from pydantic_settings.sources import PathType
22
+
23
+ from utilities.types import MaybeSequenceStr
24
+
25
+
26
+ class _SuppressDefaultConfigMessage(Filter):
27
+ @override
28
+ def filter(self, record: LogRecord) -> bool:
29
+ return not search(
30
+ r"^default config file does not exists '.*'$", record.getMessage()
31
+ )
32
+
33
+
34
+ getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
35
+
36
+
37
+ class SopsBaseSettings(CustomBaseSettings):
38
+ """Base settings for loading secrets using `sops/age`."""
39
+
40
+ # paths
41
+ secret_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
42
+
43
+ @classmethod
44
+ @override
45
+ def _yield_base_settings_sources(
46
+ cls,
47
+ settings_cls: type[BaseSettings],
48
+ env_settings: PydanticBaseSettingsSource,
49
+ /,
50
+ ) -> Iterator[PydanticBaseSettingsSource]:
51
+ yield from super()._yield_base_settings_sources(settings_cls, env_settings)
52
+ for file, section in map(_ensure_section, cls.secret_files):
53
+ yield SOPSConfigSectionSettingsSource(
54
+ settings_cls, json_file=file, section=section
55
+ )
56
+
57
+
58
+ class SOPSConfigSectionSettingsSource(SOPSConfigSettingsSource):
59
+ @override
60
+ def __init__(
61
+ self,
62
+ settings_cls: type[BaseSettings],
63
+ json_file: PathType | None = DEFAULT_PATH,
64
+ yaml_file: PathType | None = DEFAULT_PATH,
65
+ *,
66
+ section: MaybeSequenceStr,
67
+ ) -> None:
68
+ super().__init__(settings_cls, json_file=json_file, yaml_file=yaml_file) # pyright: ignore[reportArgumentType]
69
+ self.section = section
70
+
71
+ @override
72
+ def __call__(self) -> dict[str, Any]:
73
+ return _get_section(super().__call__(), self.section)
74
+
75
+
76
+ __all__ = ["SOPSConfigSectionSettingsSource", "SopsBaseSettings"]
utilities/pyinstrument.py CHANGED
@@ -7,8 +7,8 @@ from typing import TYPE_CHECKING
7
7
  from pyinstrument.profiler import Profiler
8
8
 
9
9
  from utilities.atomicwrites import writer
10
- from utilities.pathlib import get_path
11
- from utilities.whenever import format_compact, get_now, to_local_plain
10
+ from utilities.pathlib import to_path
11
+ from utilities.whenever import format_compact, get_now_local
12
12
 
13
13
  if TYPE_CHECKING:
14
14
  from collections.abc import Iterator
@@ -17,12 +17,12 @@ if TYPE_CHECKING:
17
17
 
18
18
 
19
19
  @contextmanager
20
- def profile(*, path: MaybeCallablePathLike | None = Path.cwd) -> Iterator[None]:
20
+ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
21
21
  """Profile the contents of a block."""
22
22
  with Profiler() as profiler:
23
23
  yield
24
- filename = get_path(path=path).joinpath(
25
- f"profile__{format_compact(to_local_plain(get_now()))}.html"
24
+ filename = to_path(path).joinpath(
25
+ f"profile__{format_compact(get_now_local(), path=True)}.html"
26
26
  )
27
27
  with writer(filename) as temp:
28
28
  _ = temp.write_text(profiler.output_html())