dycw-utilities 0.135.0__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dycw-utilities might be problematic.

Files changed (97):
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.178.1.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +13 -10
  7. utilities/asyncio.py +312 -787
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +195 -77
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +28 -59
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +24 -269
  23. utilities/git.py +9 -30
  24. utilities/grp.py +28 -0
  25. utilities/gzip.py +31 -0
  26. utilities/http.py +3 -2
  27. utilities/hypothesis.py +513 -159
  28. utilities/importlib.py +17 -1
  29. utilities/inflect.py +12 -4
  30. utilities/iterables.py +33 -58
  31. utilities/jinja2.py +148 -0
  32. utilities/json.py +70 -0
  33. utilities/libcst.py +38 -17
  34. utilities/lightweight_charts.py +4 -7
  35. utilities/logging.py +136 -93
  36. utilities/math.py +8 -4
  37. utilities/more_itertools.py +43 -45
  38. utilities/operator.py +27 -27
  39. utilities/orjson.py +189 -36
  40. utilities/os.py +61 -4
  41. utilities/packaging.py +115 -0
  42. utilities/parse.py +8 -5
  43. utilities/pathlib.py +269 -40
  44. utilities/permissions.py +298 -0
  45. utilities/platform.py +7 -6
  46. utilities/polars.py +1205 -413
  47. utilities/polars_ols.py +1 -1
  48. utilities/postgres.py +408 -0
  49. utilities/pottery.py +43 -19
  50. utilities/pqdm.py +3 -3
  51. utilities/psutil.py +5 -57
  52. utilities/pwd.py +28 -0
  53. utilities/pydantic.py +4 -52
  54. utilities/pydantic_settings.py +240 -0
  55. utilities/pydantic_settings_sops.py +76 -0
  56. utilities/pyinstrument.py +7 -7
  57. utilities/pytest.py +104 -143
  58. utilities/pytest_plugins/__init__.py +1 -0
  59. utilities/pytest_plugins/pytest_randomly.py +23 -0
  60. utilities/pytest_plugins/pytest_regressions.py +56 -0
  61. utilities/pytest_regressions.py +26 -46
  62. utilities/random.py +11 -6
  63. utilities/re.py +1 -1
  64. utilities/redis.py +220 -343
  65. utilities/sentinel.py +10 -0
  66. utilities/shelve.py +4 -1
  67. utilities/shutil.py +25 -0
  68. utilities/slack_sdk.py +35 -104
  69. utilities/sqlalchemy.py +496 -471
  70. utilities/sqlalchemy_polars.py +29 -54
  71. utilities/string.py +2 -3
  72. utilities/subprocess.py +1977 -0
  73. utilities/tempfile.py +112 -4
  74. utilities/testbook.py +50 -0
  75. utilities/text.py +174 -42
  76. utilities/throttle.py +158 -0
  77. utilities/timer.py +2 -2
  78. utilities/traceback.py +70 -35
  79. utilities/types.py +102 -30
  80. utilities/typing.py +479 -19
  81. utilities/uuid.py +42 -5
  82. utilities/version.py +27 -26
  83. utilities/whenever.py +1559 -361
  84. utilities/zoneinfo.py +80 -22
  85. dycw_utilities-0.135.0.dist-info/METADATA +0 -39
  86. dycw_utilities-0.135.0.dist-info/RECORD +0 -96
  87. dycw_utilities-0.135.0.dist-info/WHEEL +0 -4
  88. dycw_utilities-0.135.0.dist-info/licenses/LICENSE +0 -21
  89. utilities/aiolimiter.py +0 -25
  90. utilities/arq.py +0 -216
  91. utilities/eventkit.py +0 -388
  92. utilities/luigi.py +0 -183
  93. utilities/period.py +0 -152
  94. utilities/pudb.py +0 -62
  95. utilities/python_dotenv.py +0 -101
  96. utilities/streamlit.py +0 -105
  97. utilities/typed_settings.py +0 -123
utilities/logging.py CHANGED
@@ -8,6 +8,7 @@ from logging import (
     Formatter,
     Handler,
     Logger,
+    LoggerAdapter,
     LogRecord,
     StreamHandler,
     basicConfig,
@@ -18,9 +19,11 @@ from logging import (
 from logging.handlers import BaseRotatingHandler, TimedRotatingFileHandler
 from pathlib import Path
 from re import Pattern
+from socket import gethostname
 from typing import (
     TYPE_CHECKING,
     Any,
+    Concatenate,
     Literal,
     NotRequired,
     Self,
@@ -30,13 +33,13 @@ from typing import (
     override,
 )

-from whenever import PlainDateTime, ZonedDateTime
+from whenever import ZonedDateTime

 from utilities.atomicwrites import move_many
 from utilities.dataclasses import replace_non_sentinel
 from utilities.errors import ImpossibleCaseError
 from utilities.iterables import OneEmptyError, always_iterable, one
-from utilities.pathlib import ensure_suffix, get_path
+from utilities.pathlib import ensure_suffix, to_path
 from utilities.re import (
     ExtractGroupError,
     ExtractGroupsError,
@@ -44,26 +47,28 @@ from utilities.re import (
     extract_groups,
 )
 from utilities.sentinel import Sentinel, sentinel
-from utilities.tzlocal import LOCAL_TIME_ZONE_NAME
-from utilities.whenever import WheneverLogRecord, format_compact, get_now, get_now_local
+from utilities.whenever import (
+    WheneverLogRecord,
+    format_compact,
+    get_now_local,
+    to_zoned_date_time,
+)

 if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable, Mapping
+    from collections.abc import Callable, Iterable, Mapping, MutableMapping
     from datetime import time
     from logging import _FilterType

     from utilities.types import (
-        LoggerOrName,
+        LoggerLike,
         LogLevel,
         MaybeCallablePathLike,
         MaybeIterable,
         PathLike,
+        StrMapping,
     )


-_DEFAULT_FORMAT = (
-    "{zoned_datetime} | {name}:{funcName}:{lineno} | {levelname:8} | {message}"
-)
 _DEFAULT_DATEFMT = "%Y-%m-%d %H:%M:%S"
 _DEFAULT_BACKUP_COUNT: int = 100
 _DEFAULT_MAX_BYTES: int = 10 * 1024**2
@@ -73,6 +78,35 @@ _DEFAULT_WHEN: _When = "D"
 ##


+def add_adapter[**P](
+    logger: Logger,
+    process: Callable[Concatenate[str, P], str],
+    /,
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> LoggerAdapter:
+    """Add an adapter to a logger."""
+
+    class CustomAdapter(LoggerAdapter):
+        @override
+        def process(
+            self, msg: str, kwargs: MutableMapping[str, Any]
+        ) -> tuple[str, MutableMapping[str, Any]]:
+            extra = cast("_ArgsAndKwargs", self.extra)
+            new_msg = process(msg, *extra["args"], **extra["kwargs"])
+            return new_msg, kwargs
+
+    return CustomAdapter(logger, extra=_ArgsAndKwargs(args=args, kwargs=kwargs))
+
+
+class _ArgsAndKwargs(TypedDict):
+    args: tuple[Any, ...]
+    kwargs: StrMapping
+
+
+##
+
+
 def add_filters(handler: Handler, /, *filters: _FilterType) -> None:
     """Add a set of filters to a handler."""
     for filter_i in filters:
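Example (not part of the diff): a minimal usage sketch of the new add_adapter helper, based on its definition above; the logger name and the tag processor are hypothetical.

import logging

from utilities.logging import add_adapter


def tag(msg: str, label: str) -> str:
    """Illustrative processor: prefix each message with a label."""
    return f"[{label}] {msg}"


logging.basicConfig(level=logging.INFO)
adapter = add_adapter(logging.getLogger("app"), tag, "worker-1")
adapter.info("started")  # emitted as "[worker-1] started" through the "app" logger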
@@ -84,8 +118,10 @@ def add_filters(handler: Handler, /, *filters: _FilterType) -> None:

 def basic_config(
     *,
-    obj: LoggerOrName | Handler | None = None,
-    format_: str = _DEFAULT_FORMAT,
+    obj: LoggerLike | Handler | None = None,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
     datefmt: str = _DEFAULT_DATEFMT,
     level: LogLevel = "INFO",
     filters: MaybeIterable[_FilterType] | None = None,
@@ -95,13 +131,19 @@
     """Do the basic config."""
     match obj:
         case None:
-            basicConfig(format=format_, datefmt=datefmt, style="{", level=level)
+            if format_ is None:
+                format_use = get_format_str(prefix=prefix, hostname=hostname)
+            else:
+                format_use = format_
+            basicConfig(format=format_use, datefmt=datefmt, style="{", level=level)
         case Logger() as logger:
             logger.setLevel(level)
             logger.addHandler(handler := StreamHandler())
             basic_config(
                 obj=handler,
                 format_=format_,
+                prefix=prefix,
+                hostname=hostname,
                 datefmt=datefmt,
                 level=level,
                 filters=filters,
@@ -110,8 +152,10 @@
             )
         case str() as name:
             basic_config(
-                obj=get_logger(logger=name),
+                obj=to_logger(name),
                 format_=format_,
+                prefix=prefix,
+                hostname=hostname,
                 datefmt=datefmt,
                 level=level,
                 filters=filters,
@@ -123,50 +167,32 @@
             if filters is not None:
                 add_filters(handler, *always_iterable(filters))
             formatter = get_formatter(
+                prefix=prefix,
                 format_=format_,
+                hostname=hostname,
                 datefmt=datefmt,
                 plain=plain,
                 color_field_styles=color_field_styles,
             )
             handler.setFormatter(formatter)
-        case _ as never:
+        case never:
             assert_never(never)


 ##


-def filter_for_key(
-    key: str, /, *, default: bool = False
-) -> Callable[[LogRecord], bool]:
-    """Make a filter for a given attribute."""
-    if (key in _FILTER_FOR_KEY_BLACKLIST) or key.startswith("__"):
-        raise FilterForKeyError(key=key)
-
-    def filter_(record: LogRecord, /) -> bool:
-        try:
-            value = getattr(record, key)
-        except AttributeError:
-            return default
-        return bool(value)
-
-    return filter_
-
-
-# fmt: off
-_FILTER_FOR_KEY_BLACKLIST = {
-    "args", "created", "exc_info", "exc_text", "filename", "funcName", "getMessage", "levelname", "levelno", "lineno", "module", "msecs", "msg", "name", "pathname", "process", "processName", "relativeCreated", "stack_info", "taskName", "thread", "threadName"
-}
-# fmt: on
-
-
-@dataclass(kw_only=True, slots=True)
-class FilterForKeyError(Exception):
-    key: str
-
-    @override
-    def __str__(self) -> str:
-        return f"Invalid key: {self.key!r}"
+def get_format_str(*, prefix: str | None = None, hostname: bool = False) -> str:
+    """Generate a format string."""
+    parts: list[str] = [
+        "{zoned_datetime}",
+        f"{gethostname()}:{{process}}" if hostname else "{process}",
+        "{name}:{funcName}:{lineno}",
+        "{levelname}",
+        "{message}",
+    ]
+    joined = " | ".join(parts)
+    return joined if prefix is None else f"{prefix} {joined}"


 ##
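Example (not part of the diff): what get_format_str, as defined above, produces in place of the removed _DEFAULT_FORMAT constant. Compared with the old format, the process id is now included and the level name is no longer padded to 8 characters.

from utilities.logging import get_format_str

print(get_format_str())
# {zoned_datetime} | {process} | {name}:{funcName}:{lineno} | {levelname} | {message}
print(get_format_str(prefix=">"))
# > {zoned_datetime} | {process} | {name}:{funcName}:{lineno} | {levelname} | {message}
# With hostname=True, the second field becomes "<local hostname>:{process}".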
@@ -184,7 +210,9 @@ class _FieldStyleDict(TypedDict):

 def get_formatter(
     *,
-    format_: str = _DEFAULT_FORMAT,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
     datefmt: str = _DEFAULT_DATEFMT,
     plain: bool = False,
     color_field_styles: Mapping[str, _FieldStyleKeys] | None = None,
@@ -192,40 +220,44 @@
     """Get the formatter; colored if available."""
     setLogRecordFactory(WheneverLogRecord)
     if plain:
-        return _get_plain_formatter(format_=format_, datefmt=datefmt)
+        return _get_plain_formatter(
+            format_=format_, prefix=prefix, hostname=hostname, datefmt=datefmt
+        )
     try:
         from coloredlogs import DEFAULT_FIELD_STYLES, ColoredFormatter
     except ModuleNotFoundError:  # pragma: no cover
-        return _get_plain_formatter(format_=format_, datefmt=datefmt)
+        return _get_plain_formatter(
+            format_=format_, prefix=prefix, hostname=hostname, datefmt=datefmt
+        )
+    format_use = (
+        get_format_str(prefix=prefix, hostname=hostname) if format_ is None else format_
+    )
     default = cast("dict[_FieldStyleKeys, _FieldStyleDict]", DEFAULT_FIELD_STYLES)
     field_styles = {cast("str", k): v for k, v in default.items()}
     field_styles["zoned_datetime"] = default["asctime"]
+    field_styles["hostname"] = default["hostname"]
+    field_styles["process"] = default["hostname"]
+    field_styles["lineno"] = default["name"]
+    field_styles["funcName"] = default["name"]
     if color_field_styles is not None:
         field_styles.update({k: default[v] for k, v in color_field_styles.items()})
     return ColoredFormatter(
-        fmt=format_, datefmt=datefmt, style="{", field_styles=field_styles
+        fmt=format_use, datefmt=datefmt, style="{", field_styles=field_styles
     )


 def _get_plain_formatter(
-    *, format_: str = _DEFAULT_FORMAT, datefmt: str = _DEFAULT_DATEFMT
+    *,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
+    datefmt: str = _DEFAULT_DATEFMT,
 ) -> Formatter:
     """Get the plain formatter."""
-    return Formatter(fmt=format_, datefmt=datefmt, style="{")
-
-
-##
-
-
-def get_logger(*, logger: LoggerOrName | None = None) -> Logger:
-    """Get a logger."""
-    match logger:
-        case Logger():
-            return logger
-        case str() | None:
-            return getLogger(logger)
-        case _ as never:
-            assert_never(never)
+    format_use = (
+        get_format_str(prefix=prefix, hostname=hostname) if format_ is None else format_
+    )
+    return Formatter(fmt=format_use, datefmt=datefmt, style="{")


 ##
@@ -254,13 +286,13 @@ class GetLoggingLevelNumberError(Exception):

 def setup_logging(
     *,
-    logger: LoggerOrName | None = None,
-    format_: str = _DEFAULT_FORMAT,
+    logger: LoggerLike | None = None,
+    format_: str | None = None,
     datefmt: str = _DEFAULT_DATEFMT,
     console_level: LogLevel = "INFO",
     console_prefix: str = "❯",  # noqa: RUF001
     console_filters: MaybeIterable[_FilterType] | None = None,
-    files_dir: MaybeCallablePathLike | None = None,
+    files_dir: MaybeCallablePathLike = Path.cwd,
     files_max_bytes: int = _DEFAULT_MAX_BYTES,
     files_when: _When = _DEFAULT_WHEN,
     files_interval: int = 1,
@@ -270,20 +302,20 @@
     """Set up logger."""
     basic_config(
         obj=logger,
-        format_=f"{console_prefix} {format_}",
+        prefix=console_prefix,
+        format_=format_,
         datefmt=datefmt,
         level=console_level,
         filters=console_filters,
     )
-    logger_use = get_logger(logger=logger)
+    logger_use = to_logger(logger)
     name = logger_use.name
-    dir_ = get_path(path=files_dir)
     levels: list[LogLevel] = ["DEBUG", "INFO", "ERROR"]
     for level in levels:
         lower = level.lower()
        for stem in [lower, f"{name}-{lower}"]:
             handler = SizeAndTimeRotatingFileHandler(
-                dir_.joinpath(stem).with_suffix(".txt"),
+                to_path(files_dir).joinpath(stem).with_suffix(".txt"),
                 maxBytes=files_max_bytes,
                 when=files_when,
                 interval=files_interval,
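Example (not part of the diff): a sketch of the files_dir change above. The default is now the Path.cwd callable resolved through to_path per handler, rather than None resolved through the removed get_path call; the logger name and alternative directory below are hypothetical.

from utilities.logging import setup_logging

# With no files_dir, the rotating debug/info/error log files now land in the
# current working directory (Path.cwd is resolved via to_path at setup time).
setup_logging(logger="app")
# A specific directory can still be passed, e.g.:
# setup_logging(logger="app", files_dir="/var/log/app")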
@@ -293,6 +325,7 @@
             basic_config(
                 obj=handler,
                 format_=format_,
+                hostname=True,
                 datefmt=datefmt,
                 level=level,
                 filters=files_filters,
@@ -354,9 +387,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
     def emit(self, record: LogRecord) -> None:
         try:
             if (self._backup_count is not None) and self._should_rollover(record):
-                self._do_rollover(  # skipif-ci-and-windows
-                    backup_count=self._backup_count
-                )
+                self._do_rollover(backup_count=self._backup_count)
             FileHandler.emit(self, record)
         except Exception:  # noqa: BLE001  # pragma: no cover
             self.handleError(record)
@@ -366,23 +397,23 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
             self.stream.close()
             self.stream = None

-        actions = _compute_rollover_actions(  # skipif-ci-and-windows
+        actions = _compute_rollover_actions(
             self._directory,
             self._stem,
             self._suffix,
             patterns=self._patterns,
             backup_count=backup_count,
         )
-        actions.do()  # skipif-ci-and-windows
+        actions.do()

         if not self.delay:  # pragma: no cover
             self.stream = self._open()
-        self._time_handler.rolloverAt = (  # skipif-ci-and-windows
-            self._time_handler.computeRollover(get_now().timestamp())
+        self._time_handler.rolloverAt = self._time_handler.computeRollover(
+            get_now_local().timestamp()
         )

     def _should_rollover(self, record: LogRecord, /) -> bool:
-        if self._max_bytes is not None:  # skipif-ci-and-windows
+        if self._max_bytes is not None:
             try:
                 size = self._filename.stat().st_size
             except FileNotFoundError:
@@ -390,14 +421,14 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
             else:
                 if size >= self._max_bytes:
                     return True
-        return bool(self._time_handler.shouldRollover(record))  # skipif-ci-and-windows
+        return bool(self._time_handler.shouldRollover(record))


 def _compute_rollover_patterns(stem: str, suffix: str, /) -> _RolloverPatterns:
     return _RolloverPatterns(
         pattern1=re.compile(rf"^{stem}\.(\d+){suffix}$"),
-        pattern2=re.compile(rf"^{stem}\.(\d+)__([\dT]+?){suffix}$"),
-        pattern3=re.compile(rf"^{stem}\.(\d+)__([\dT]+?)__([\dT]+?){suffix}$"),
+        pattern2=re.compile(rf"^{stem}\.(\d+)__(.+?){suffix}$"),
+        pattern3=re.compile(rf"^{stem}\.(\d+)__(.+?)__(.+?){suffix}$"),
     )

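Example (not part of the diff): the loosened rollover patterns above accept any compact timestamp between the double underscores, not only digits and "T". The stem, suffix, and filename below are hypothetical illustrations of that difference.

import re

stem, suffix = "info", ".txt"
old = re.compile(rf"^{stem}\.(\d+)__([\dT]+?){suffix}$")
new = re.compile(rf"^{stem}\.(\d+)__(.+?){suffix}$")

name = "info.1__20240101T120000+0000.txt"  # a compact timestamp carrying an offset
print(old.match(name))           # None: "+" falls outside [\dT]
print(new.match(name).groups())  # ('1', '20240101T120000+0000')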
 
@@ -497,10 +528,8 @@ class _RotatingLogFile:
                 stem=stem,
                 suffix=suffix,
                 index=int(index),
-                start=PlainDateTime.parse_common_iso(start).assume_tz(
-                    LOCAL_TIME_ZONE_NAME
-                ),
-                end=PlainDateTime.parse_common_iso(end).assume_tz(LOCAL_TIME_ZONE_NAME),
+                start=to_zoned_date_time(start),
+                end=to_zoned_date_time(end),
             )
         try:
             index, end = extract_groups(patterns.pattern2, path.name)
@@ -512,7 +541,7 @@
                 stem=stem,
                 suffix=suffix,
                 index=int(index),
-                end=PlainDateTime.parse_common_iso(end).assume_tz(LOCAL_TIME_ZONE_NAME),
+                end=to_zoned_date_time(end),
             )
         try:
             index = extract_group(patterns.pattern1, path.name)
@@ -533,9 +562,9 @@ class _RotatingLogFile:
             case int() as index, None, None:
                 tail = str(index)
             case int() as index, None, ZonedDateTime() as end:
-                tail = f"{index}__{format_compact(end)}"
+                tail = f"{index}__{format_compact(end, path=True)}"
             case int() as index, ZonedDateTime() as start, ZonedDateTime() as end:
-                tail = f"{index}__{format_compact(start)}__{format_compact(end)}"
+                tail = f"{index}__{format_compact(start, path=True)}__{format_compact(end, path=True)}"
             case _:  # pragma: no cover
                 raise ImpossibleCaseError(
                     case=[f"{self.index=}", f"{self.start=}", f"{self.end=}"]
@@ -573,14 +602,28 @@ class _Rotation:
         return self.file.replace(index=self.index, start=self.start, end=self.end).path


+##
+
+
+def to_logger(logger: LoggerLike | None = None, /) -> Logger:
+    """Convert to a logger."""
+    match logger:
+        case Logger():
+            return logger
+        case str() | None:
+            return getLogger(logger)
+        case never:
+            assert_never(never)
+
+
 __all__ = [
-    "FilterForKeyError",
     "GetLoggingLevelNumberError",
     "SizeAndTimeRotatingFileHandler",
+    "add_adapter",
     "add_filters",
     "basic_config",
-    "filter_for_key",
-    "get_logger",
+    "get_format_str",
     "get_logging_level_number",
     "setup_logging",
+    "to_logger",
 ]
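Example (not part of the diff): to_logger replaces get_logger and takes the logger positionally instead of as a keyword; based on the definition above, with a hypothetical logger name.

from logging import getLogger

from utilities.logging import to_logger

assert to_logger("app") is getLogger("app")  # a name resolves to that named logger
assert to_logger(None) is getLogger()        # None resolves to the root logger
logger = getLogger("app")
assert to_logger(logger) is logger           # an existing Logger passes through unchanged
# Previously: get_logger(logger="app"); now: to_logger("app").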
utilities/math.py CHANGED
@@ -641,7 +641,10 @@ def _is_close(
 ##


-def number_of_decimals(x: float, /, *, max_decimals: int = 20) -> int:
+MAX_DECIMALS = 10
+
+
+def number_of_decimals(x: float, /, *, max_decimals: int = MAX_DECIMALS) -> int:
     """Get the number of decimals."""
     _, frac = divmod(x, 1)
     results = (
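Example (not part of the diff): the default search depth drops from a hard-coded 20 to the new exported MAX_DECIMALS (10), and a deeper search can still be requested explicitly. The printed values are indicative only, since the function's tolerance handling is not shown in this hunk.

from utilities.math import MAX_DECIMALS, number_of_decimals

print(MAX_DECIMALS)              # 10
print(number_of_decimals(1.25))  # expected 2 for an exactly representable float
# Restore the previous depth where needed:
print(number_of_decimals(3.14159265358979, max_decimals=20))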
@@ -731,7 +734,7 @@ def round_(
                     return 0
                 case -1:
                     return floor(x)
-                case _ as never:
+                case never:
                     assert_never(never)
         case "standard-tie-floor":
             return _round_tie_standard(x, "floor", rel_tol=rel_tol, abs_tol=abs_tol)
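Example (not part of the diff): the recurring `case _ as never:` to `case never:` edits across these files swap the wildcard-with-capture pattern for a bare capture pattern; both bind the unmatched value so assert_never can enforce exhaustiveness. A self-contained illustration with a hypothetical function:

from typing import Literal, assert_never


def parity(kind: Literal["even", "odd"]) -> int:
    match kind:
        case "even":
            return 0
        case "odd":
            return 1
        case never:  # bare capture pattern: binds any remaining value
            assert_never(never)  # type checkers flag this branch if a case is missing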
@@ -743,7 +746,7 @@
             )
         case "standard-tie-away-zero":
             return _round_tie_standard(x, "away-zero", rel_tol=rel_tol, abs_tol=abs_tol)
-        case _ as never:
+        case never:
             assert_never(never)


@@ -876,7 +879,7 @@ def sign(
             if is_negative(x, rel_tol=rel_tol, abs_tol=abs_tol):
                 return -1
             return 0
-        case _ as never:
+        case never:
             assert_never(never)


@@ -889,6 +892,7 @@ def significant_figures(x: float, /, *, n: int = 2) -> str:


 __all__ = [
+    "MAX_DECIMALS",
     "MAX_FLOAT32",
     "MAX_FLOAT64",
     "MAX_INT8",
utilities/more_itertools.py CHANGED
@@ -21,57 +21,66 @@ from more_itertools import peekable as _peekable
 from utilities.functions import get_class_name
 from utilities.iterables import OneNonUniqueError, one
 from utilities.reprlib import get_repr
-from utilities.sentinel import Sentinel, sentinel
+from utilities.sentinel import Sentinel, is_sentinel, sentinel

 if TYPE_CHECKING:
     from collections.abc import Iterable, Iterator, Mapping, Sequence


 @overload
-def bucket_mapping[T, U, UH: Hashable](
+def bucket_mapping[T, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: Callable[[T], U],
+    pre: None = None,
+    post: None = None,
+) -> Mapping[UH, Iterator[T]]: ...
+@overload
+def bucket_mapping[T, UH: Hashable](
+    iterable: Iterable[T],
+    func: Callable[[T], UH],
+    /,
+    *,
+    pre: None = None,
     post: Literal["list"],
-) -> Mapping[UH, list[U]]: ...
+) -> Mapping[UH, list[T]]: ...
 @overload
-def bucket_mapping[T, U, UH: Hashable](
+def bucket_mapping[T, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: Callable[[T], U],
+    pre: None = None,
     post: Literal["tuple"],
-) -> Mapping[UH, tuple[U, ...]]: ...
+) -> Mapping[UH, tuple[T, ...]]: ...
 @overload
-def bucket_mapping[T, U, UH: Hashable](
+def bucket_mapping[T, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: Callable[[T], U],
+    pre: None = None,
     post: Literal["set"],
-) -> Mapping[UH, set[U]]: ...
+) -> Mapping[UH, set[T]]: ...
 @overload
-def bucket_mapping[T, U, UH: Hashable](
+def bucket_mapping[T, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: Callable[[T], U],
+    pre: None = None,
     post: Literal["frozenset"],
-) -> Mapping[UH, frozenset[U]]: ...
+) -> Mapping[UH, frozenset[T]]: ...
 @overload
-def bucket_mapping[T, U, UH: Hashable](
+def bucket_mapping[T, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: Callable[[T], U] | None = None,
+    pre: None = None,
     post: Literal["unique"],
-) -> Mapping[UH, U]: ...
+) -> Mapping[UH, T]: ...
 @overload
 def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
@@ -82,59 +91,50 @@ def bucket_mapping[T, U, UH: Hashable](
     post: None = None,
 ) -> Mapping[UH, Iterator[U]]: ...
 @overload
-def bucket_mapping[T, UH: Hashable](
+def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: None = None,
+    pre: Callable[[T], U],
     post: Literal["list"],
-) -> Mapping[UH, list[T]]: ...
+) -> Mapping[UH, list[U]]: ...
 @overload
-def bucket_mapping[T, UH: Hashable](
+def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: None = None,
+    pre: Callable[[T], U],
     post: Literal["tuple"],
-) -> Mapping[UH, tuple[T, ...]]: ...
+) -> Mapping[UH, tuple[U, ...]]: ...
 @overload
-def bucket_mapping[T, UH: Hashable](
+def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: None = None,
+    pre: Callable[[T], U],
     post: Literal["set"],
-) -> Mapping[UH, set[T]]: ...
+) -> Mapping[UH, set[U]]: ...
 @overload
-def bucket_mapping[T, UH: Hashable](
+def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: None = None,
+    pre: Callable[[T], U],
     post: Literal["frozenset"],
-) -> Mapping[UH, frozenset[T]]: ...
+) -> Mapping[UH, frozenset[U]]: ...
 @overload
-def bucket_mapping[T, UH: Hashable](
+def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
     func: Callable[[T], UH],
     /,
     *,
-    pre: None = None,
+    pre: Callable[[T], U] | None = None,
     post: Literal["unique"],
-) -> Mapping[UH, T]: ...
-@overload
-def bucket_mapping[T, UH: Hashable](
-    iterable: Iterable[T],
-    func: Callable[[T], UH],
-    /,
-    *,
-    pre: None = None,
-    post: None = None,
-) -> Mapping[UH, Iterator[T]]: ...
+) -> Mapping[UH, U]: ...
 @overload
 def bucket_mapping[T, U, UH: Hashable](
     iterable: Iterable[T],
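Example (not part of the diff): the overload shuffle above only reorders and retypes the signatures (values keep type T without pre, and become U with pre). A runtime sketch, assuming bucket_mapping's grouping behaviour is unchanged; the data is illustrative.

from utilities.more_itertools import bucket_mapping

words = ["apple", "avocado", "banana", "blueberry"]
by_letter = bucket_mapping(words, lambda w: w[0], post="list")
# expected {"a": ["apple", "avocado"], "b": ["banana", "blueberry"]}  -- Mapping[str, list[str]]
lengths = bucket_mapping(words, lambda w: w[0], pre=len, post="set")
# expected {"a": {5, 7}, "b": {6, 9}}  -- Mapping[str, set[int]]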
@@ -206,7 +206,7 @@ def bucket_mapping[T, U, UH: Hashable](
             return {k: frozenset(map(pre, v)) for k, v in mapping.items()}
         case Callable(), "unique":
             return _bucket_mapping_unique({k: map(pre, v) for k, v in mapping.items()})
-        case _ as never:
+        case never:
             assert_never(never)


@@ -290,9 +290,7 @@ class peekable[T](_peekable):  # noqa: N801
     def peek[U](self, *, default: U) -> T | U: ...
     @override
     def peek(self, *, default: Any = sentinel) -> Any:  # pyright: ignore[reportIncompatibleMethodOverride]
-        if isinstance(default, Sentinel):
-            return super().peek()
-        return super().peek(default=default)
+        return super().peek() if is_sentinel(default) else super().peek(default=default)

     def takewhile(self, predicate: Callable[[T], bool], /) -> Iterator[T]:
         while bool(self) and predicate(self.peek()):
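Example (not part of the diff): peek now routes its sentinel check through is_sentinel, with unchanged behaviour; a usage sketch with illustrative data.

from utilities.more_itertools import peekable

items = peekable([1, 2, 3])
print(items.peek())              # 1; the iterator is not advanced
print(next(items))               # 1

empty = peekable([])
print(empty.peek(default=None))  # None instead of raising StopIteration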
@@ -374,7 +372,7 @@ def _yield_splits2[T](
             len_tail = max(len_win - head, 0)
             if len_tail >= 1:
                 yield window, head, len_tail
-        case _ as never:
+        case never:
             assert_never(never)
