dycw-utilities 0.148.5__py3-none-any.whl → 0.174.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dycw-utilities might be problematic; consult the registry's security advisory page for more details.

Files changed (83)
  1. dycw_utilities-0.174.12.dist-info/METADATA +41 -0
  2. dycw_utilities-0.174.12.dist-info/RECORD +104 -0
  3. dycw_utilities-0.174.12.dist-info/WHEEL +4 -0
  4. {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.174.12.dist-info}/entry_points.txt +3 -0
  5. utilities/__init__.py +1 -1
  6. utilities/{eventkit.py → aeventkit.py} +12 -11
  7. utilities/altair.py +7 -6
  8. utilities/asyncio.py +113 -64
  9. utilities/atomicwrites.py +1 -1
  10. utilities/atools.py +64 -4
  11. utilities/cachetools.py +9 -6
  12. utilities/click.py +145 -49
  13. utilities/concurrent.py +1 -1
  14. utilities/contextlib.py +4 -2
  15. utilities/contextvars.py +20 -1
  16. utilities/cryptography.py +3 -3
  17. utilities/dataclasses.py +15 -28
  18. utilities/docker.py +292 -0
  19. utilities/enum.py +2 -2
  20. utilities/errors.py +1 -1
  21. utilities/fastapi.py +8 -3
  22. utilities/fpdf2.py +2 -2
  23. utilities/functions.py +20 -297
  24. utilities/git.py +19 -0
  25. utilities/grp.py +28 -0
  26. utilities/hypothesis.py +360 -78
  27. utilities/inflect.py +1 -1
  28. utilities/iterables.py +12 -58
  29. utilities/jinja2.py +148 -0
  30. utilities/json.py +1 -1
  31. utilities/libcst.py +7 -7
  32. utilities/logging.py +74 -85
  33. utilities/math.py +8 -4
  34. utilities/more_itertools.py +4 -6
  35. utilities/operator.py +1 -1
  36. utilities/orjson.py +86 -34
  37. utilities/os.py +49 -2
  38. utilities/parse.py +2 -2
  39. utilities/pathlib.py +66 -34
  40. utilities/permissions.py +297 -0
  41. utilities/platform.py +5 -5
  42. utilities/polars.py +932 -420
  43. utilities/polars_ols.py +1 -1
  44. utilities/postgres.py +296 -174
  45. utilities/pottery.py +8 -73
  46. utilities/pqdm.py +3 -3
  47. utilities/pwd.py +28 -0
  48. utilities/pydantic.py +11 -0
  49. utilities/pydantic_settings.py +240 -0
  50. utilities/pydantic_settings_sops.py +76 -0
  51. utilities/pyinstrument.py +5 -5
  52. utilities/pytest.py +155 -46
  53. utilities/pytest_plugins/pytest_randomly.py +1 -1
  54. utilities/pytest_plugins/pytest_regressions.py +7 -3
  55. utilities/pytest_regressions.py +2 -3
  56. utilities/random.py +11 -6
  57. utilities/re.py +1 -1
  58. utilities/redis.py +101 -64
  59. utilities/sentinel.py +10 -0
  60. utilities/shelve.py +4 -1
  61. utilities/shutil.py +25 -0
  62. utilities/slack_sdk.py +8 -3
  63. utilities/sqlalchemy.py +422 -352
  64. utilities/sqlalchemy_polars.py +28 -52
  65. utilities/string.py +1 -1
  66. utilities/subprocess.py +864 -0
  67. utilities/tempfile.py +62 -4
  68. utilities/testbook.py +50 -0
  69. utilities/text.py +165 -42
  70. utilities/timer.py +2 -2
  71. utilities/traceback.py +46 -36
  72. utilities/types.py +62 -23
  73. utilities/typing.py +479 -19
  74. utilities/uuid.py +42 -5
  75. utilities/version.py +27 -26
  76. utilities/whenever.py +661 -151
  77. utilities/zoneinfo.py +80 -22
  78. dycw_utilities-0.148.5.dist-info/METADATA +0 -41
  79. dycw_utilities-0.148.5.dist-info/RECORD +0 -95
  80. dycw_utilities-0.148.5.dist-info/WHEEL +0 -4
  81. dycw_utilities-0.148.5.dist-info/licenses/LICENSE +0 -21
  82. utilities/period.py +0 -237
  83. utilities/typed_settings.py +0 -144
utilities/pottery.py CHANGED
@@ -1,29 +1,27 @@
1
1
  from __future__ import annotations
2
2
 
3
- from contextlib import nullcontext, suppress
3
+ from contextlib import suppress
4
4
  from dataclasses import dataclass
5
5
  from sys import maxsize
6
6
  from typing import TYPE_CHECKING, override
7
7
 
8
- from pottery import AIORedlock, ExtendUnlockedLock
8
+ from pottery import AIORedlock
9
9
  from pottery.exceptions import ReleaseUnlockedLock
10
10
  from redis.asyncio import Redis
11
11
 
12
- from utilities.asyncio import loop_until_succeed, sleep_td, timeout_td
12
+ from utilities.asyncio import sleep_td, timeout_td
13
13
  from utilities.contextlib import enhanced_async_context_manager
14
14
  from utilities.iterables import always_iterable
15
- from utilities.logging import get_logger
16
- from utilities.whenever import MILLISECOND, SECOND, to_seconds
15
+ from utilities.whenever import MILLISECOND, SECOND, to_nanoseconds
17
16
 
18
17
  if TYPE_CHECKING:
19
- from collections.abc import AsyncIterator, Callable, Iterable
18
+ from collections.abc import AsyncIterator, Iterable
20
19
 
21
20
  from whenever import Delta
22
21
 
23
- from utilities.types import Coro, LoggerOrName, MaybeIterable
22
+ from utilities.types import MaybeIterable
24
23
 
25
24
  _NUM: int = 1
26
- _TIMEOUT_TRY_ACQUIRE: Delta = SECOND
27
25
  _TIMEOUT_RELEASE: Delta = 10 * SECOND
28
26
  _SLEEP: Delta = MILLISECOND
29
27
 
@@ -42,63 +40,6 @@ async def extend_lock(
42
40
  ##
43
41
 
44
42
 
45
- @enhanced_async_context_manager
46
- async def try_yield_coroutine_looper(
47
- redis: MaybeIterable[Redis],
48
- key: str,
49
- /,
50
- *,
51
- num: int = _NUM,
52
- timeout_release: Delta = _TIMEOUT_RELEASE,
53
- num_extensions: int | None = None,
54
- timeout_acquire: Delta = _TIMEOUT_TRY_ACQUIRE,
55
- sleep_acquire: Delta = _SLEEP,
56
- throttle: Delta | None = None,
57
- logger: LoggerOrName | None = None,
58
- sleep_error: Delta | None = None,
59
- ) -> AsyncIterator[CoroutineLooper | None]:
60
- """Try acquire access to a coroutine looper."""
61
- try: # skipif-ci-and-not-linux
62
- async with yield_access(
63
- redis,
64
- key,
65
- num=num,
66
- timeout_release=timeout_release,
67
- num_extensions=num_extensions,
68
- timeout_acquire=timeout_acquire,
69
- sleep=sleep_acquire,
70
- throttle=throttle,
71
- ) as lock:
72
- yield CoroutineLooper(lock=lock, logger=logger, sleep=sleep_error)
73
- except _YieldAccessUnableToAcquireLockError as error: # skipif-ci-and-not-linux
74
- if logger is not None:
75
- get_logger(logger=logger).info("%s", error)
76
- async with nullcontext():
77
- yield
78
-
79
-
80
- @dataclass(order=True, unsafe_hash=True, kw_only=True)
81
- class CoroutineLooper:
82
- """Looper, guarded by a lock, to repeatedly call a coroutine until it succeeds."""
83
-
84
- lock: AIORedlock
85
- logger: LoggerOrName | None = None
86
- sleep: Delta | None = None
87
-
88
- async def __call__[**P](
89
- self, func: Callable[P, Coro[None]], *args: P.args, **kwargs: P.kwargs
90
- ) -> bool:
91
- def make_coro() -> Coro[None]:
92
- return func(*args, **kwargs)
93
-
94
- return await loop_until_succeed(
95
- make_coro, logger=self.logger, errors=ExtendUnlockedLock, sleep=self.sleep
96
- )
97
-
98
-
99
- ##
100
-
101
-
102
43
  @enhanced_async_context_manager
103
44
  async def yield_access(
104
45
  redis: MaybeIterable[Redis],
@@ -122,7 +63,7 @@ async def yield_access(
122
63
  AIORedlock(
123
64
  key=f"{key}_{i}_of_{num}",
124
65
  masters=masters,
125
- auto_release_time=to_seconds(timeout_release),
66
+ auto_release_time=to_nanoseconds(timeout_release) / 1e9,
126
67
  num_extensions=maxsize if num_extensions is None else num_extensions,
127
68
  )
128
69
  for i in range(1, num + 1)
@@ -193,10 +134,4 @@ class _YieldAccessUnableToAcquireLockError(YieldAccessError):
193
134
  return f"Unable to acquire any 1 of {self.num} locks for {self.key!r} after {self.timeout}" # skipif-ci-and-not-linux
194
135
 
195
136
 
196
- __all__ = [
197
- "CoroutineLooper",
198
- "YieldAccessError",
199
- "extend_lock",
200
- "try_yield_coroutine_looper",
201
- "yield_access",
202
- ]
137
+ __all__ = ["YieldAccessError", "extend_lock", "yield_access"]
utilities/pqdm.py CHANGED
@@ -9,7 +9,7 @@ from tqdm.auto import tqdm as tqdm_auto
9
9
  from utilities.functions import get_func_name
10
10
  from utilities.iterables import apply_to_varargs
11
11
  from utilities.os import get_cpu_use
12
- from utilities.sentinel import Sentinel, sentinel
12
+ from utilities.sentinel import Sentinel, is_sentinel, sentinel
13
13
 
14
14
  if TYPE_CHECKING:
15
15
  from collections.abc import Callable, Iterable
@@ -90,7 +90,7 @@ def pqdm_starmap[T](
90
90
  **_get_desc(desc, func),
91
91
  **kwargs,
92
92
  )
93
- case _ as never:
93
+ case never:
94
94
  assert_never(never)
95
95
  return list(result)
96
96
 
@@ -98,7 +98,7 @@ def pqdm_starmap[T](
98
98
  def _get_desc(
99
99
  desc: str | None | Sentinel, func: Callable[..., Any], /
100
100
  ) -> dict[str, str]:
101
- desc_use = get_func_name(func) if isinstance(desc, Sentinel) else desc
101
+ desc_use = get_func_name(func) if is_sentinel(desc) else desc
102
102
  return {} if desc_use is None else {"desc": desc_use}
103
103
 
104
104
 
utilities/pwd.py ADDED
@@ -0,0 +1,28 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import assert_never
4
+
5
+ from utilities.os import EFFECTIVE_USER_ID
6
+ from utilities.platform import SYSTEM
7
+
8
+
9
+ def get_uid_name(uid: int, /) -> str | None:
10
+ """Get the name of a user ID."""
11
+ match SYSTEM:
12
+ case "windows": # skipif-not-windows
13
+ return None
14
+ case "mac" | "linux": # skipif-windows
15
+ from pwd import getpwuid
16
+
17
+ return getpwuid(uid).pw_name
18
+ case never:
19
+ assert_never(never)
20
+
21
+
22
+ ROOT_USER_NAME = get_uid_name(0)
23
+ EFFECTIVE_USER_NAME = (
24
+ None if EFFECTIVE_USER_ID is None else get_uid_name(EFFECTIVE_USER_ID)
25
+ )
26
+
27
+
28
+ __all__ = ["EFFECTIVE_USER_NAME", "ROOT_USER_NAME", "get_uid_name"]
utilities/pydantic.py ADDED
@@ -0,0 +1,11 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ from typing import Annotated
5
+
6
+ from pydantic import BeforeValidator
7
+
8
+ ExpandedPath = Annotated[Path, BeforeValidator(lambda p: Path(p).expanduser())]
9
+
10
+
11
+ __all__ = ["ExpandedPath"]
@@ -0,0 +1,240 @@
1
+ from __future__ import annotations
2
+
3
+ from functools import reduce
4
+ from pathlib import Path
5
+ from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
6
+
7
+ from pydantic import Field, create_model
8
+ from pydantic_settings import (
9
+ BaseSettings,
10
+ CliSettingsSource,
11
+ JsonConfigSettingsSource,
12
+ PydanticBaseSettingsSource,
13
+ SettingsConfigDict,
14
+ TomlConfigSettingsSource,
15
+ YamlConfigSettingsSource,
16
+ )
17
+ from pydantic_settings.sources import DEFAULT_PATH
18
+
19
+ from utilities.errors import ImpossibleCaseError
20
+ from utilities.iterables import always_iterable
21
+
22
+ if TYPE_CHECKING:
23
+ from collections.abc import Iterator, Sequence
24
+
25
+ from pydantic_settings.sources import PathType
26
+
27
+ from utilities.types import MaybeSequenceStr, PathLike
28
+
29
+
30
+ type PathLikeWithSection = tuple[PathLike, MaybeSequenceStr]
31
+ type PathLikeOrWithSection = PathLike | PathLikeWithSection
32
+
33
+
34
+ class CustomBaseSettings(BaseSettings):
35
+ """Base settings for loading JSON/TOML/YAML files."""
36
+
37
+ # paths
38
+ json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
39
+ toml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
40
+ yaml_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
41
+
42
+ # config
43
+ model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
44
+ frozen=True, env_nested_delimiter="__"
45
+ )
46
+
47
+ @classmethod
48
+ @override
49
+ def settings_customise_sources(
50
+ cls,
51
+ settings_cls: type[BaseSettings],
52
+ init_settings: PydanticBaseSettingsSource,
53
+ env_settings: PydanticBaseSettingsSource,
54
+ dotenv_settings: PydanticBaseSettingsSource,
55
+ file_secret_settings: PydanticBaseSettingsSource,
56
+ ) -> tuple[PydanticBaseSettingsSource, ...]:
57
+ _ = (init_settings, dotenv_settings, file_secret_settings)
58
+ return tuple(cls._yield_base_settings_sources(settings_cls, env_settings))
59
+
60
+ @classmethod
61
+ def _yield_base_settings_sources(
62
+ cls,
63
+ settings_cls: type[BaseSettings],
64
+ env_settings: PydanticBaseSettingsSource,
65
+ /,
66
+ ) -> Iterator[PydanticBaseSettingsSource]:
67
+ yield env_settings
68
+ for file, section in map(_ensure_section, cls.json_files):
69
+ yield JsonConfigSectionSettingsSource(
70
+ settings_cls, json_file=file, section=section
71
+ )
72
+ for file, section in map(_ensure_section, cls.toml_files):
73
+ yield TomlConfigSectionSettingsSource(
74
+ settings_cls, toml_file=file, section=section
75
+ )
76
+ for file, section in map(_ensure_section, cls.yaml_files):
77
+ yield YamlConfigSectionSettingsSource(
78
+ settings_cls, yaml_file=file, section=section
79
+ )
80
+
81
+
82
+ class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
83
+ @override
84
+ def __init__(
85
+ self,
86
+ settings_cls: type[BaseSettings],
87
+ json_file: PathType | None = DEFAULT_PATH,
88
+ json_file_encoding: str | None = None,
89
+ *,
90
+ section: MaybeSequenceStr,
91
+ ) -> None:
92
+ super().__init__(
93
+ settings_cls, json_file=json_file, json_file_encoding=json_file_encoding
94
+ )
95
+ self.section = section
96
+
97
+ @override
98
+ def __call__(self) -> dict[str, Any]:
99
+ return _get_section(super().__call__(), self.section)
100
+
101
+
102
+ class TomlConfigSectionSettingsSource(TomlConfigSettingsSource):
103
+ @override
104
+ def __init__(
105
+ self,
106
+ settings_cls: type[BaseSettings],
107
+ toml_file: PathType | None = DEFAULT_PATH,
108
+ *,
109
+ section: MaybeSequenceStr,
110
+ ) -> None:
111
+ super().__init__(settings_cls, toml_file=toml_file)
112
+ self.section = section
113
+
114
+ @override
115
+ def __call__(self) -> dict[str, Any]:
116
+ return _get_section(super().__call__(), self.section)
117
+
118
+
119
+ class YamlConfigSectionSettingsSource(YamlConfigSettingsSource):
120
+ @override
121
+ def __init__(
122
+ self,
123
+ settings_cls: type[BaseSettings],
124
+ yaml_file: PathType | None = DEFAULT_PATH,
125
+ yaml_file_encoding: str | None = None,
126
+ yaml_config_section: str | None = None,
127
+ *,
128
+ section: MaybeSequenceStr,
129
+ ) -> None:
130
+ super().__init__(
131
+ settings_cls,
132
+ yaml_file=yaml_file,
133
+ yaml_file_encoding=yaml_file_encoding,
134
+ yaml_config_section=yaml_config_section,
135
+ )
136
+ self.section = section
137
+
138
+ @override
139
+ def __call__(self) -> dict[str, Any]:
140
+ return _get_section(super().__call__(), self.section)
141
+
142
+
143
+ def _ensure_section(file: PathLikeOrWithSection, /) -> PathLikeWithSection:
144
+ match file:
145
+ case Path() | str():
146
+ return file, []
147
+ case Path() | str() as path, str() | list() | tuple() as section:
148
+ return path, section
149
+ case never:
150
+ assert_never(never)
151
+
152
+
153
+ def _get_section(
154
+ mapping: dict[str, Any], section: MaybeSequenceStr, /
155
+ ) -> dict[str, Any]:
156
+ return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
157
+
158
+
159
+ ##
160
+
161
+
162
+ class HashableBaseSettings(BaseSettings):
163
+ """Base settings for loading JSON files."""
164
+
165
+ # config
166
+ model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
167
+
168
+
169
+ ##
170
+
171
+
172
+ def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
173
+ """Load a set of settings."""
174
+ _ = cls.model_rebuild()
175
+ if cli:
176
+ cls_with_defaults = _load_settings_create_model(cls)
177
+
178
+ @classmethod
179
+ def settings_customise_sources(
180
+ cls: type[BaseSettings],
181
+ settings_cls: type[BaseSettings],
182
+ init_settings: PydanticBaseSettingsSource,
183
+ env_settings: PydanticBaseSettingsSource,
184
+ dotenv_settings: PydanticBaseSettingsSource,
185
+ file_secret_settings: PydanticBaseSettingsSource,
186
+ ) -> tuple[PydanticBaseSettingsSource, ...]:
187
+ parent = cast(
188
+ "Any", super(cls_with_defaults, cls)
189
+ ).settings_customise_sources(
190
+ settings_cls=settings_cls,
191
+ init_settings=init_settings,
192
+ env_settings=env_settings,
193
+ dotenv_settings=dotenv_settings,
194
+ file_secret_settings=file_secret_settings,
195
+ )
196
+ return (
197
+ CliSettingsSource(
198
+ settings_cls, cli_parse_args=True, case_sensitive=False
199
+ ),
200
+ *parent,
201
+ )
202
+
203
+ cls_use = type(
204
+ cls.__name__,
205
+ (cls_with_defaults,),
206
+ {"settings_customise_sources": settings_customise_sources},
207
+ )
208
+ cls_use = cast("type[T]", cls_use)
209
+ else:
210
+ cls_use = cls
211
+ return cls_use()
212
+
213
+
214
+ def _load_settings_create_model[T: BaseSettings](
215
+ cls: type[T], /, *, values: T | None = None
216
+ ) -> type[T]:
217
+ values_use = cls() if values is None else values
218
+ kwargs: dict[str, Any] = {}
219
+ for name, field in cls.model_fields.items():
220
+ if (ann := field.annotation) is None:
221
+ raise ImpossibleCaseError(case=[f"{ann=}"]) # pragma: no cover
222
+ value = getattr(values_use, name)
223
+ if (
224
+ isinstance(cast("Any", ann), type) # 'ann' is possible not a type
225
+ and issubclass(ann, BaseSettings)
226
+ ):
227
+ kwargs[name] = _load_settings_create_model(ann, values=value)
228
+ else:
229
+ kwargs[name] = (field.annotation, Field(default=value))
230
+ return create_model(cls.__name__, __base__=cls, **kwargs)
231
+
232
+
233
+ __all__ = [
234
+ "CustomBaseSettings",
235
+ "HashableBaseSettings",
236
+ "JsonConfigSectionSettingsSource",
237
+ "TomlConfigSectionSettingsSource",
238
+ "YamlConfigSectionSettingsSource",
239
+ "load_settings",
240
+ ]
@@ -0,0 +1,76 @@
1
+ from __future__ import annotations
2
+
3
+ from logging import Filter, LogRecord, getLogger
4
+ from re import search
5
+ from typing import TYPE_CHECKING, Any, ClassVar, override
6
+
7
+ from pydantic_settings.sources import DEFAULT_PATH
8
+ from pydantic_settings_sops import SOPSConfigSettingsSource
9
+
10
+ from utilities.pydantic_settings import (
11
+ CustomBaseSettings,
12
+ PathLikeOrWithSection,
13
+ _ensure_section,
14
+ _get_section,
15
+ )
16
+
17
+ if TYPE_CHECKING:
18
+ from collections.abc import Iterator, Sequence
19
+
20
+ from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
21
+ from pydantic_settings.sources import PathType
22
+
23
+ from utilities.types import MaybeSequenceStr
24
+
25
+
26
+ class _SuppressDefaultConfigMessage(Filter):
27
+ @override
28
+ def filter(self, record: LogRecord) -> bool:
29
+ return not search(
30
+ r"^default config file does not exists '.*'$", record.getMessage()
31
+ )
32
+
33
+
34
+ getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
35
+
36
+
37
+ class SopsBaseSettings(CustomBaseSettings):
38
+ """Base settings for loading secrets using `sops/age`."""
39
+
40
+ # paths
41
+ secret_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
42
+
43
+ @classmethod
44
+ @override
45
+ def _yield_base_settings_sources(
46
+ cls,
47
+ settings_cls: type[BaseSettings],
48
+ env_settings: PydanticBaseSettingsSource,
49
+ /,
50
+ ) -> Iterator[PydanticBaseSettingsSource]:
51
+ yield from super()._yield_base_settings_sources(settings_cls, env_settings)
52
+ for file, section in map(_ensure_section, cls.secret_files):
53
+ yield SOPSConfigSectionSettingsSource(
54
+ settings_cls, json_file=file, section=section
55
+ )
56
+
57
+
58
+ class SOPSConfigSectionSettingsSource(SOPSConfigSettingsSource):
59
+ @override
60
+ def __init__(
61
+ self,
62
+ settings_cls: type[BaseSettings],
63
+ json_file: PathType | None = DEFAULT_PATH,
64
+ yaml_file: PathType | None = DEFAULT_PATH,
65
+ *,
66
+ section: MaybeSequenceStr,
67
+ ) -> None:
68
+ super().__init__(settings_cls, json_file=json_file, yaml_file=yaml_file) # pyright: ignore[reportArgumentType]
69
+ self.section = section
70
+
71
+ @override
72
+ def __call__(self) -> dict[str, Any]:
73
+ return _get_section(super().__call__(), self.section)
74
+
75
+
76
+ __all__ = ["SOPSConfigSectionSettingsSource", "SopsBaseSettings"]
utilities/pyinstrument.py CHANGED
@@ -7,8 +7,8 @@ from typing import TYPE_CHECKING
7
7
  from pyinstrument.profiler import Profiler
8
8
 
9
9
  from utilities.atomicwrites import writer
10
- from utilities.pathlib import get_path
11
- from utilities.whenever import format_compact, get_now, to_local_plain
10
+ from utilities.pathlib import to_path
11
+ from utilities.whenever import format_compact, get_now_local
12
12
 
13
13
  if TYPE_CHECKING:
14
14
  from collections.abc import Iterator
@@ -17,12 +17,12 @@ if TYPE_CHECKING:
17
17
 
18
18
 
19
19
  @contextmanager
20
- def profile(*, path: MaybeCallablePathLike | None = Path.cwd) -> Iterator[None]:
20
+ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
21
21
  """Profile the contents of a block."""
22
22
  with Profiler() as profiler:
23
23
  yield
24
- filename = get_path(path=path).joinpath(
25
- f"profile__{format_compact(to_local_plain(get_now()))}.html"
24
+ filename = to_path(path).joinpath(
25
+ f"profile__{format_compact(get_now_local(), path=True)}.html"
26
26
  )
27
27
  with writer(filename) as temp:
28
28
  _ = temp.write_text(profiler.output_html())