dycw-utilities 0.129.10__py3-none-any.whl → 0.175.17__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (103)
  1. dycw_utilities-0.175.17.dist-info/METADATA +34 -0
  2. dycw_utilities-0.175.17.dist-info/RECORD +103 -0
  3. dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.175.17.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +14 -14
  7. utilities/asyncio.py +350 -819
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +77 -22
  10. utilities/cachetools.py +24 -29
  11. utilities/click.py +393 -237
  12. utilities/concurrent.py +8 -11
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +83 -118
  17. utilities/docker.py +293 -0
  18. utilities/enum.py +26 -23
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +29 -65
  21. utilities/fpdf2.py +3 -3
  22. utilities/functions.py +169 -416
  23. utilities/functools.py +18 -19
  24. utilities/git.py +9 -30
  25. utilities/grp.py +28 -0
  26. utilities/gzip.py +31 -0
  27. utilities/http.py +3 -2
  28. utilities/hypothesis.py +738 -589
  29. utilities/importlib.py +17 -1
  30. utilities/inflect.py +25 -0
  31. utilities/iterables.py +194 -262
  32. utilities/jinja2.py +148 -0
  33. utilities/json.py +70 -0
  34. utilities/libcst.py +38 -17
  35. utilities/lightweight_charts.py +5 -9
  36. utilities/logging.py +345 -543
  37. utilities/math.py +18 -13
  38. utilities/memory_profiler.py +11 -15
  39. utilities/more_itertools.py +200 -131
  40. utilities/operator.py +33 -29
  41. utilities/optuna.py +6 -6
  42. utilities/orjson.py +272 -137
  43. utilities/os.py +61 -4
  44. utilities/parse.py +59 -61
  45. utilities/pathlib.py +281 -40
  46. utilities/permissions.py +298 -0
  47. utilities/pickle.py +2 -2
  48. utilities/platform.py +24 -5
  49. utilities/polars.py +1214 -430
  50. utilities/polars_ols.py +1 -1
  51. utilities/postgres.py +408 -0
  52. utilities/pottery.py +113 -26
  53. utilities/pqdm.py +10 -11
  54. utilities/psutil.py +6 -57
  55. utilities/pwd.py +28 -0
  56. utilities/pydantic.py +4 -54
  57. utilities/pydantic_settings.py +240 -0
  58. utilities/pydantic_settings_sops.py +76 -0
  59. utilities/pyinstrument.py +8 -10
  60. utilities/pytest.py +227 -121
  61. utilities/pytest_plugins/__init__.py +1 -0
  62. utilities/pytest_plugins/pytest_randomly.py +23 -0
  63. utilities/pytest_plugins/pytest_regressions.py +56 -0
  64. utilities/pytest_regressions.py +26 -46
  65. utilities/random.py +13 -9
  66. utilities/re.py +58 -28
  67. utilities/redis.py +401 -550
  68. utilities/scipy.py +1 -1
  69. utilities/sentinel.py +10 -0
  70. utilities/shelve.py +4 -1
  71. utilities/shutil.py +25 -0
  72. utilities/slack_sdk.py +36 -106
  73. utilities/sqlalchemy.py +502 -473
  74. utilities/sqlalchemy_polars.py +38 -94
  75. utilities/string.py +2 -3
  76. utilities/subprocess.py +1572 -0
  77. utilities/tempfile.py +86 -4
  78. utilities/testbook.py +50 -0
  79. utilities/text.py +165 -42
  80. utilities/timer.py +37 -65
  81. utilities/traceback.py +158 -929
  82. utilities/types.py +146 -116
  83. utilities/typing.py +531 -71
  84. utilities/tzdata.py +1 -53
  85. utilities/tzlocal.py +6 -23
  86. utilities/uuid.py +43 -5
  87. utilities/version.py +27 -26
  88. utilities/whenever.py +1776 -386
  89. utilities/zoneinfo.py +84 -22
  90. dycw_utilities-0.129.10.dist-info/METADATA +0 -241
  91. dycw_utilities-0.129.10.dist-info/RECORD +0 -96
  92. dycw_utilities-0.129.10.dist-info/WHEEL +0 -4
  93. dycw_utilities-0.129.10.dist-info/licenses/LICENSE +0 -21
  94. utilities/datetime.py +0 -1409
  95. utilities/eventkit.py +0 -402
  96. utilities/loguru.py +0 -144
  97. utilities/luigi.py +0 -228
  98. utilities/period.py +0 -324
  99. utilities/pyrsistent.py +0 -89
  100. utilities/python_dotenv.py +0 -105
  101. utilities/streamlit.py +0 -105
  102. utilities/sys.py +0 -87
  103. utilities/tenacity.py +0 -145
utilities/psutil.py CHANGED
@@ -1,74 +1,23 @@
  from __future__ import annotations

  from dataclasses import dataclass, field
- from json import dumps
- from logging import getLogger
  from math import isclose, nan
- from pathlib import Path
- from typing import TYPE_CHECKING, Self, override
+ from typing import TYPE_CHECKING, Self

  from psutil import swap_memory, virtual_memory

- from utilities.asyncio import Looper
  from utilities.contextlib import suppress_super_object_attribute_error
- from utilities.datetime import SECOND, get_now
+ from utilities.whenever import get_now

  if TYPE_CHECKING:
-     import datetime as dt
-     from logging import Logger
+     from whenever import ZonedDateTime

-     from utilities.types import Duration, PathLike

-
- @dataclass(kw_only=True)
- class MemoryMonitorService(Looper[None]):
-     """Service to monitor memory usage."""
-
-     # base
-     freq: Duration = field(default=10 * SECOND, repr=False)
-     backoff: Duration = field(default=10 * SECOND, repr=False)
-     # self
-     console: str | None = field(default=None, repr=False)
-     path: PathLike = "memory.txt"
-     _console: Logger | None = field(init=False, repr=False)
-     _path: Path = field(init=False, repr=False)
-
-     @override
-     def __post_init__(self) -> None:
-         super().__post_init__()
-         if self.console is not None:
-             self._console = getLogger(self.console)
-         self._path = Path(self.path)
-         self._path.parent.mkdir(parents=True, exist_ok=True)
-
-     @override
-     async def core(self) -> None:
-         await super().core()
-         memory = MemoryUsage.new()
-         mapping = {
-             "datetime": memory.datetime.strftime("%Y-%m-%d %H:%M:%S"),
-             "virtual used (mb)": memory.virtual_used_mb,
-             "virtual total (mb)": memory.virtual_total_mb,
-             "virtual (%)": memory.virtual_pct,
-             "swap used (mb)": memory.swap_used_mb,
-             "swap total (mb)": memory.swap_total_mb,
-             "swap (%)": memory.swap_pct,
-         }
-         ser = dumps(mapping)
-         with self._path.open(mode="a") as fh:
-             _ = fh.write(f"{ser}\n")
-         if self._console is not None:
-             self._console.info("%s", mapping)
-
-
- ##
-
-
- @dataclass(kw_only=True)
+ @dataclass(order=True, unsafe_hash=True, kw_only=True)
  class MemoryUsage:
      """A memory usage."""

-     datetime: dt.datetime = field(default_factory=get_now)
+     datetime: ZonedDateTime = field(default_factory=get_now)
      virtual_used: int = field(repr=False)
      virtual_used_mb: int = field(init=False)
      virtual_total: int = field(repr=False)
@@ -112,4 +61,4 @@ class MemoryUsage:
          return round(bytes_ / (1024**2))


- __all__ = ["MemoryMonitorService", "MemoryUsage"]
+ __all__ = ["MemoryUsage"]
utilities/pwd.py ADDED
@@ -0,0 +1,28 @@
+ from __future__ import annotations
+
+ from typing import assert_never
+
+ from utilities.os import EFFECTIVE_USER_ID
+ from utilities.platform import SYSTEM
+
+
+ def get_uid_name(uid: int, /) -> str | None:
+     """Get the name of a user ID."""
+     match SYSTEM:
+         case "windows":  # skipif-not-windows
+             return None
+         case "mac" | "linux":  # skipif-windows
+             from pwd import getpwuid
+
+             return getpwuid(uid).pw_name
+         case never:
+             assert_never(never)
+
+
+ ROOT_USER_NAME = get_uid_name(0)
+ EFFECTIVE_USER_NAME = (
+     None if EFFECTIVE_USER_ID is None else get_uid_name(EFFECTIVE_USER_ID)
+ )
+
+
+ __all__ = ["EFFECTIVE_USER_NAME", "ROOT_USER_NAME", "get_uid_name"]
utilities/pydantic.py CHANGED
@@ -1,61 +1,11 @@
  from __future__ import annotations

- from dataclasses import dataclass
  from pathlib import Path
- from typing import TYPE_CHECKING, TypeVar, override
+ from typing import Annotated

- from pydantic import BaseModel
+ from pydantic import BeforeValidator

- if TYPE_CHECKING:
-     from utilities.types import PathLike
+ ExpandedPath = Annotated[Path, BeforeValidator(lambda p: Path(p).expanduser())]

- _TBaseModel = TypeVar("_TBaseModel", bound=BaseModel)

-
- class HashableBaseModel(BaseModel):
-     """Subclass of BaseModel which is hashable."""
-
-     @override
-     def __hash__(self) -> int:
-         return hash((type(self), *self.__dict__.values()))
-
-
- def load_model(model: type[_TBaseModel], path: PathLike, /) -> _TBaseModel:
-     path = Path(path)
-     try:
-         with path.open() as fh:
-             return model.model_validate_json(fh.read())
-     except FileNotFoundError:
-         raise _LoadModelFileNotFoundError(model=model, path=path) from None
-     except IsADirectoryError:  # skipif-not-windows
-         raise _LoadModelIsADirectoryError(model=model, path=path) from None
-
-
- @dataclass(kw_only=True, slots=True)
- class LoadModelError(Exception):
-     model: type[BaseModel]
-     path: Path
-
-
- @dataclass(kw_only=True, slots=True)
- class _LoadModelFileNotFoundError(LoadModelError):
-     @override
-     def __str__(self) -> str:
-         return f"Unable to load {self.model}; path {str(self.path)!r} must exist."
-
-
- @dataclass(kw_only=True, slots=True)
- class _LoadModelIsADirectoryError(LoadModelError):
-     @override
-     def __str__(self) -> str:
-         return f"Unable to load {self.model}; path {str(self.path)!r} must not be a directory."  # skipif-not-windows
-
-
- def save_model(model: BaseModel, path: PathLike, /, *, overwrite: bool = False) -> None:
-     from utilities.atomicwrites import writer
-
-     with writer(path, overwrite=overwrite) as temp, temp.open(mode="w") as fh:
-         _ = fh.write(model.model_dump_json())
-
-
- __all__ = ["HashableBaseModel", "LoadModelError", "load_model", "save_model"]
+ __all__ = ["ExpandedPath"]
utilities/pydantic_settings.py ADDED
@@ -0,0 +1,240 @@
+ from __future__ import annotations
+
+ from functools import reduce
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
+
+ from pydantic import Field, create_model
+ from pydantic_settings import (
+     BaseSettings,
+     CliSettingsSource,
+     JsonConfigSettingsSource,
+     PydanticBaseSettingsSource,
+     SettingsConfigDict,
+     TomlConfigSettingsSource,
+     YamlConfigSettingsSource,
+ )
+ from pydantic_settings.sources import DEFAULT_PATH
+
+ from utilities.errors import ImpossibleCaseError
+ from utilities.iterables import always_iterable
+
+ if TYPE_CHECKING:
+     from collections.abc import Iterator, Sequence
+
+     from pydantic_settings.sources import PathType
+
+     from utilities.types import MaybeSequenceStr, PathLike
+
+
+ type PathLikeWithSection = tuple[PathLike, MaybeSequenceStr]
+ type PathLikeOrWithSection = PathLike | PathLikeWithSection
+
+
+ class CustomBaseSettings(BaseSettings):
+     """Base settings for loading JSON/TOML/YAML files."""
+
+     # paths
+     json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+     toml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+     yaml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+
+     # config
+     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
+         frozen=True, env_nested_delimiter="__"
+     )
+
+     @classmethod
+     @override
+     def settings_customise_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         init_settings: PydanticBaseSettingsSource,
+         env_settings: PydanticBaseSettingsSource,
+         dotenv_settings: PydanticBaseSettingsSource,
+         file_secret_settings: PydanticBaseSettingsSource,
+     ) -> tuple[PydanticBaseSettingsSource, ...]:
+         _ = (init_settings, dotenv_settings, file_secret_settings)
+         return tuple(cls._yield_base_settings_sources(settings_cls, env_settings))
+
+     @classmethod
+     def _yield_base_settings_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         env_settings: PydanticBaseSettingsSource,
+         /,
+     ) -> Iterator[PydanticBaseSettingsSource]:
+         yield env_settings
+         for file, section in map(_ensure_section, cls.json_files):
+             yield JsonConfigSectionSettingsSource(
+                 settings_cls, json_file=file, section=section
+             )
+         for file, section in map(_ensure_section, cls.toml_files):
+             yield TomlConfigSectionSettingsSource(
+                 settings_cls, toml_file=file, section=section
+             )
+         for file, section in map(_ensure_section, cls.yaml_files):
+             yield YamlConfigSectionSettingsSource(
+                 settings_cls, yaml_file=file, section=section
+             )
+
+
+ class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         json_file: PathType | None = DEFAULT_PATH,
+         json_file_encoding: str | None = None,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(
+             settings_cls, json_file=json_file, json_file_encoding=json_file_encoding
+         )
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ class TomlConfigSectionSettingsSource(TomlConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         toml_file: PathType | None = DEFAULT_PATH,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(settings_cls, toml_file=toml_file)
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ class YamlConfigSectionSettingsSource(YamlConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         yaml_file: PathType | None = DEFAULT_PATH,
+         yaml_file_encoding: str | None = None,
+         yaml_config_section: str | None = None,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(
+             settings_cls,
+             yaml_file=yaml_file,
+             yaml_file_encoding=yaml_file_encoding,
+             yaml_config_section=yaml_config_section,
+         )
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ def _ensure_section(file: PathLikeOrWithSection, /) -> PathLikeWithSection:
+     match file:
+         case Path() | str():
+             return file, []
+         case Path() | str() as path, str() | list() | tuple() as section:
+             return path, section
+         case never:
+             assert_never(never)
+
+
+ def _get_section(
+     mapping: dict[str, Any], section: MaybeSequenceStr, /
+ ) -> dict[str, Any]:
+     return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
+
+
+ ##
+
+
+ class HashableBaseSettings(BaseSettings):
+     """Base settings for loading JSON files."""
+
+     # config
+     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
+
+
+ ##
+
+
+ def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
+     """Load a set of settings."""
+     _ = cls.model_rebuild()
+     if cli:
+         cls_with_defaults = _load_settings_create_model(cls)
+
+         @classmethod
+         def settings_customise_sources(
+             cls: type[BaseSettings],
+             settings_cls: type[BaseSettings],
+             init_settings: PydanticBaseSettingsSource,
+             env_settings: PydanticBaseSettingsSource,
+             dotenv_settings: PydanticBaseSettingsSource,
+             file_secret_settings: PydanticBaseSettingsSource,
+         ) -> tuple[PydanticBaseSettingsSource, ...]:
+             parent = cast(
+                 "Any", super(cls_with_defaults, cls)
+             ).settings_customise_sources(
+                 settings_cls=settings_cls,
+                 init_settings=init_settings,
+                 env_settings=env_settings,
+                 dotenv_settings=dotenv_settings,
+                 file_secret_settings=file_secret_settings,
+             )
+             return (
+                 CliSettingsSource(
+                     settings_cls, cli_parse_args=True, case_sensitive=False
+                 ),
+                 *parent,
+             )
+
+         cls_use = type(
+             cls.__name__,
+             (cls_with_defaults,),
+             {"settings_customise_sources": settings_customise_sources},
+         )
+         cls_use = cast("type[T]", cls_use)
+     else:
+         cls_use = cls
+     return cls_use()
+
+
+ def _load_settings_create_model[T: BaseSettings](
+     cls: type[T], /, *, values: T | None = None
+ ) -> type[T]:
+     values_use = cls() if values is None else values
+     kwargs: dict[str, Any] = {}
+     for name, field in cls.model_fields.items():
+         if (ann := field.annotation) is None:
+             raise ImpossibleCaseError(case=[f"{ann=}"])  # pragma: no cover
+         value = getattr(values_use, name)
+         if (
+             isinstance(cast("Any", ann), type)  # 'ann' is possible not a type
+             and issubclass(ann, BaseSettings)
+         ):
+             kwargs[name] = _load_settings_create_model(ann, values=value)
+         else:
+             kwargs[name] = (field.annotation, Field(default=value))
+     return create_model(cls.__name__, __base__=cls, **kwargs)
+
+
+ __all__ = [
+     "CustomBaseSettings",
+     "HashableBaseSettings",
+     "JsonConfigSectionSettingsSource",
+     "TomlConfigSectionSettingsSource",
+     "YamlConfigSectionSettingsSource",
+     "load_settings",
+ ]
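Taken together, CustomBaseSettings yields the environment source first and then one section-aware source per configured JSON/TOML/YAML file, and load_settings(..., cli=True) prepends a CliSettingsSource so command-line arguments take the highest priority. A hedged usage sketch (the settings class, file name, section, and fields below are illustrative, not from the package):

    from collections.abc import Sequence
    from typing import ClassVar

    from utilities.pydantic_settings import CustomBaseSettings, load_settings

    class AppSettings(CustomBaseSettings):
        # hypothetical config file: read the [app] table of config.toml
        toml_files: ClassVar[Sequence] = [("config.toml", "app")]

        host: str = "localhost"
        port: int = 8000

    settings = load_settings(AppSettings)            # env vars first, then config.toml [app]
    settings = load_settings(AppSettings, cli=True)  # --host/--port override both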
utilities/pydantic_settings_sops.py ADDED
@@ -0,0 +1,76 @@
+ from __future__ import annotations
+
+ from logging import Filter, LogRecord, getLogger
+ from re import search
+ from typing import TYPE_CHECKING, Any, ClassVar, override
+
+ from pydantic_settings.sources import DEFAULT_PATH
+ from pydantic_settings_sops import SOPSConfigSettingsSource
+
+ from utilities.pydantic_settings import (
+     CustomBaseSettings,
+     PathLikeOrWithSection,
+     _ensure_section,
+     _get_section,
+ )
+
+ if TYPE_CHECKING:
+     from collections.abc import Iterator, Sequence
+
+     from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
+     from pydantic_settings.sources import PathType
+
+     from utilities.types import MaybeSequenceStr
+
+
+ class _SuppressDefaultConfigMessage(Filter):
+     @override
+     def filter(self, record: LogRecord) -> bool:
+         return not search(
+             r"^default config file does not exists '.*'$", record.getMessage()
+         )
+
+
+ getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
+
+
+ class SopsBaseSettings(CustomBaseSettings):
+     """Base settings for loading secrets using `sops/age`."""
+
+     # paths
+     secret_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
+
+     @classmethod
+     @override
+     def _yield_base_settings_sources(
+         cls,
+         settings_cls: type[BaseSettings],
+         env_settings: PydanticBaseSettingsSource,
+         /,
+     ) -> Iterator[PydanticBaseSettingsSource]:
+         yield from super()._yield_base_settings_sources(settings_cls, env_settings)
+         for file, section in map(_ensure_section, cls.secret_files):
+             yield SOPSConfigSectionSettingsSource(
+                 settings_cls, json_file=file, section=section
+             )
+
+
+ class SOPSConfigSectionSettingsSource(SOPSConfigSettingsSource):
+     @override
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         json_file: PathType | None = DEFAULT_PATH,
+         yaml_file: PathType | None = DEFAULT_PATH,
+         *,
+         section: MaybeSequenceStr,
+     ) -> None:
+         super().__init__(settings_cls, json_file=json_file, yaml_file=yaml_file)  # pyright: ignore[reportArgumentType]
+         self.section = section
+
+     @override
+     def __call__(self) -> dict[str, Any]:
+         return _get_section(super().__call__(), self.section)
+
+
+ __all__ = ["SOPSConfigSectionSettingsSource", "SopsBaseSettings"]
utilities/pyinstrument.py CHANGED
@@ -6,9 +6,9 @@ from typing import TYPE_CHECKING

  from pyinstrument.profiler import Profiler

- from utilities.datetime import serialize_compact
- from utilities.pathlib import get_path
- from utilities.tzlocal import get_now_local
+ from utilities.atomicwrites import writer
+ from utilities.pathlib import to_path
+ from utilities.whenever import format_compact, get_now_local

  if TYPE_CHECKING:
      from collections.abc import Iterator
@@ -17,17 +17,15 @@ if TYPE_CHECKING:


  @contextmanager
- def profile(*, path: MaybeCallablePathLike | None = Path.cwd) -> Iterator[None]:
+ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
      """Profile the contents of a block."""
-     from utilities.atomicwrites import writer
-
      with Profiler() as profiler:
          yield
-     filename = get_path(path=path).joinpath(
-         f"profile__{serialize_compact(get_now_local())}.html"
+     filename = to_path(path).joinpath(
+         f"profile__{format_compact(get_now_local(), path=True)}.html"
      )
-     with writer(filename) as temp, temp.open(mode="w") as fh:
-         _ = fh.write(profiler.output_html())
+     with writer(filename) as temp:
+         _ = temp.write_text(profiler.output_html())


  __all__ = ["profile"]