dycw-utilities 0.109.29__py3-none-any.whl → 0.110.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dycw-utilities
- Version: 0.109.29
+ Version: 0.110.1
  Author-email: Derek Wan <d.wan@icloud.com>
  License-File: LICENSE
  Requires-Python: >=3.12
@@ -1,4 +1,4 @@
- utilities/__init__.py,sha256=AIZiPdrHMOKkgJqoeU29sMxSZXZMURdyCYzqvmBulyI,61
+ utilities/__init__.py,sha256=hplCmGbNVfruqTvCNUXlSrWUj9O7kQcWSuSnZNUbp0w,60
  utilities/altair.py,sha256=Gpja-flOo-Db0PIPJLJsgzAlXWoKUjPU1qY-DQ829ek,9156
  utilities/astor.py,sha256=xuDUkjq0-b6fhtwjhbnebzbqQZAjMSHR1IIS5uOodVg,777
  utilities/asyncio.py,sha256=41oQUurWMvadFK5gFnaG21hMM0Vmfn2WS6OpC0R9mas,14757
@@ -11,7 +11,7 @@ utilities/contextlib.py,sha256=OOIIEa5lXKGzFAnauaul40nlQnQko6Na4ryiMJcHkIg,478
  utilities/contextvars.py,sha256=RsSGGrbQqqZ67rOydnM7WWIsM2lIE31UHJLejnHJPWY,505
  utilities/cryptography.py,sha256=HyOewI20cl3uRXsKivhIaeLVDInQdzgXZGaly7hS5dE,771
  utilities/cvxpy.py,sha256=Rv1-fD-XYerosCavRF8Pohop2DBkU3AlFaGTfD8AEAA,13776
- utilities/dataclasses.py,sha256=8-38WHrScAvElBNvFxBnhJwab1XXkSXpDOiNPOAvh2Q,23295
+ utilities/dataclasses.py,sha256=e2YuCj1DF4eecFFxsu20MknghXJ4SNkWCmbIAOXXr18,25942
  utilities/datetime.py,sha256=GOs-MIEW_A49kzqa1yhIoeNeSqqPVgGO-h2AThtgTDk,37326
  utilities/enum.py,sha256=HoRwVCWzsnH0vpO9ZEcAAIZLMv0Sn2vJxxA4sYMQgDs,5793
  utilities/errors.py,sha256=BtSNP0JC3ik536ddPyTerLomCRJV9f6kdMe6POz0QHM,361
@@ -41,7 +41,7 @@ utilities/operator.py,sha256=0M2yZJ0PODH47ogFEnkGMBe_cfxwZR02T_92LZVZvHo,3715
  utilities/optuna.py,sha256=loyJGWTzljgdJaoLhP09PT8Jz6o_pwBOwehY33lHkhw,1923
  utilities/orjson.py,sha256=Wj5pzG_VdgoAy14a7Luhem-BgYrRtRFvvl_POiszRd0,36930
  utilities/os.py,sha256=D_FyyT-6TtqiN9KSS7c9g1fnUtgxmyMtzAjmYLkk46A,3587
- utilities/parse.py,sha256=l8W5ik1CvmPEsas-dB7cS5gCKpeyr0hQDtjZqI-9azI,16283
+ utilities/parse.py,sha256=wxqh4YwBfQ7nm249-F_6uqiLo1js9_xji9AVvUxZ5nI,17091
  utilities/pathlib.py,sha256=31WPMXdLIyXgYOMMl_HOI2wlo66MGSE-cgeelk-Lias,1410
  utilities/period.py,sha256=ikHXsWtDLr553cfH6p9mMaiCnIAP69B7q84ckWV3HaA,10884
  utilities/pickle.py,sha256=Bhvd7cZl-zQKQDFjUerqGuSKlHvnW1K2QXeU5UZibtg,657
@@ -55,7 +55,7 @@ utilities/pyinstrument.py,sha256=ROq2txPwbe2ZUuYJ2IDNbfT97lu2ca0v5_C_yn6sSlM,800
  utilities/pyrsistent.py,sha256=TLJfiiKO4cKNU_pCoM3zDqmSM421qpuoaeaBNnyC_Ac,2489
  utilities/pytest.py,sha256=85QUax4g2VBBAqAHtM9wekcSLB7_9O8AKFTaCshztL8,7989
  utilities/pytest_regressions.py,sha256=-SVT9647Dg6-JcdsiaDKXe3NdOmmrvGevLKWwGjxq3c,5088
- utilities/python_dotenv.py,sha256=JDIGuaGIiVkOcOBDpA7OZZM_0vxrkZyrrfqOssB6cqE,3170
+ utilities/python_dotenv.py,sha256=6viKAI7zx9YQU2ewITaOcad7wMwkrf6FbYpBmCl4vCA,3170
  utilities/random.py,sha256=lYdjgxB7GCfU_fwFVl5U-BIM_HV3q6_urL9byjrwDM8,4157
  utilities/re.py,sha256=5J4d8VwIPFVrX2Eb8zfoxImDv7IwiN_U7mJ07wR2Wvs,3958
  utilities/redis.py,sha256=CsDQqc9V6ASLzLQwtbQXZQEndyG9pJiCOhPlPeszt7Y,21203
@@ -77,7 +77,7 @@ utilities/text.py,sha256=Ax_n-nY80_onWxag9M0PkmbaAqwyut9AEA9tEMd5lBs,6694
  utilities/threading.py,sha256=GvBOp4CyhHfN90wGXZuA2VKe9fGzMaEa7oCl4f3nnPU,1009
  utilities/timer.py,sha256=Rkc49KSpHuC8s7vUxGO9DU55U9I6yDKnchsQqrUCVBs,4075
  utilities/traceback.py,sha256=KwHPLdEbdj0fFhXo8MBfxcvem8A-VXYDwFMNJ6f0cTM,27328
- utilities/types.py,sha256=z1hbBOT5TkzTn2JOvSldw6DScxi3erG9qpJ3xci66GI,17963
+ utilities/types.py,sha256=Hi9aKaxN3G9zFVlLjx6U9xd_HMGq-eqHLxmG1cSdVpg,17967
  utilities/typing.py,sha256=gLg4EbE1FX52fJ1d3ji4i08qolwu9qgWt8w_w_Y5DTk,5512
  utilities/tzdata.py,sha256=2ZsPmhTVM9Ptrxb4QrWKtKOB9RiH8IOO-A1u7ULdVbg,176
  utilities/tzlocal.py,sha256=42BCquGF54oIqIKe5RGziP4K8Nbm3Ey7uqcNn6m5ge8,534
@@ -87,7 +87,7 @@ utilities/warnings.py,sha256=yUgjnmkCRf6QhdyAXzl7u0qQFejhQG3PrjoSwxpbHrs,1819
  utilities/whenever.py,sha256=TjoTAJ1R27-rKXiXzdE4GzPidmYqm0W58XydDXp-QZM,17786
  utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
  utilities/zoneinfo.py,sha256=-DQz5a0Ikw9jfSZtL0BEQkXOMC9yGn_xiJYNCLMiqEc,1989
- dycw_utilities-0.109.29.dist-info/METADATA,sha256=wN-G6OupzEqFsIek2lE458u5MkSYceoapxbzRnWL8Z8,13005
- dycw_utilities-0.109.29.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- dycw_utilities-0.109.29.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
- dycw_utilities-0.109.29.dist-info/RECORD,,
+ dycw_utilities-0.110.1.dist-info/METADATA,sha256=aP9GGGR2pc47dx2nhMCDkvuUbKPEEZyaGZI6mZtIibI,13004
+ dycw_utilities-0.110.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ dycw_utilities-0.110.1.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+ dycw_utilities-0.110.1.dist-info/RECORD,,
utilities/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from __future__ import annotations

- __version__ = "0.109.29"
+ __version__ = "0.110.1"
utilities/dataclasses.py CHANGED
@@ -1,7 +1,6 @@
  from __future__ import annotations

  from collections.abc import Mapping
- from collections.abc import Set as AbstractSet
  from dataclasses import MISSING, dataclass, field, fields, replace
  from typing import (
  TYPE_CHECKING,
@@ -22,13 +21,26 @@ from utilities.functions import (
  )
  from utilities.iterables import OneStrEmptyError, OneStrNonUniqueError, one_str
  from utilities.operator import is_equal
- from utilities.parse import ParseTextError, parse_text
+ from utilities.parse import (
+ LIST_SEPARATOR,
+ PAIR_SEPARATOR,
+ ParseObjectError,
+ parse_object,
+ serialize_object,
+ )
  from utilities.sentinel import Sentinel, sentinel
- from utilities.types import ParseTextExtra, StrStrMapping, TDataclass
+ from utilities.text import (
+ _SplitKeyValuePairsDuplicateKeysError,
+ _SplitKeyValuePairsSplitError,
+ join_strs,
+ split_key_value_pairs,
+ )
+ from utilities.types import ParseObjectExtra, StrStrMapping, TDataclass
  from utilities.typing import get_type_hints

  if TYPE_CHECKING:
  from collections.abc import Callable, Iterable, Iterator
+ from collections.abc import Set as AbstractSet

  from utilities.types import Dataclass, StrMapping

@@ -44,11 +56,11 @@ def dataclass_repr(
  obj: Dataclass,
  /,
  *,
- include: Iterable[str] | None = None,
- exclude: Iterable[str] | None = None,
  globalns: StrMapping | None = None,
  localns: StrMapping | None = None,
  warn_name_errors: bool = False,
+ include: Iterable[str] | None = None,
+ exclude: Iterable[str] | None = None,
  rel_tol: float | None = None,
  abs_tol: float | None = None,
  extra: Mapping[type[_T], Callable[[_T, _T], bool]] | None = None,
@@ -75,11 +87,11 @@ def dataclass_repr(
  if is_dataclass_instance(fld.value):
  repr_ = dataclass_repr(
  fld.value,
- include=include,
- exclude=exclude,
  globalns=globalns,
  localns=localns,
  warn_name_errors=warn_name_errors,
+ include=include,
+ exclude=exclude,
  rel_tol=rel_tol,
  abs_tol=abs_tol,
  extra=extra,
@@ -90,11 +102,11 @@ def dataclass_repr(
  repr_ = [
  dataclass_repr(
  v,
- include=include,
- exclude=exclude,
  globalns=globalns,
  localns=localns,
  warn_name_errors=warn_name_errors,
+ include=include,
+ exclude=exclude,
  rel_tol=rel_tol,
  abs_tol=abs_tol,
  extra=extra,
@@ -123,11 +135,11 @@ def dataclass_to_dict(
  obj: Dataclass,
  /,
  *,
- include: Iterable[str] | None = None,
- exclude: Iterable[str] | None = None,
  globalns: StrMapping | None = None,
  localns: StrMapping | None = None,
  warn_name_errors: bool = False,
+ include: Iterable[str] | None = None,
+ exclude: Iterable[str] | None = None,
  rel_tol: float | None = None,
  abs_tol: float | None = None,
  extra: Mapping[type[_T], Callable[[_T, _T], bool]] | None = None,
@@ -155,6 +167,8 @@ def dataclass_to_dict(
  globalns=globalns,
  localns=localns,
  warn_name_errors=warn_name_errors,
+ include=include,
+ exclude=exclude,
  rel_tol=rel_tol,
  abs_tol=abs_tol,
  extra=extra,
@@ -169,6 +183,8 @@ def dataclass_to_dict(
  globalns=globalns,
  localns=localns,
  warn_name_errors=warn_name_errors,
+ include=include,
+ exclude=exclude,
  rel_tol=rel_tol,
  abs_tol=abs_tol,
  extra=extra,
@@ -381,85 +397,68 @@ def replace_non_sentinel(
  ##


- def str_mapping_to_field_mapping(
- cls: type[TDataclass],
- mapping: Mapping[str, _T],
+ def serialize_dataclass(
+ obj: Dataclass,
  /,
  *,
- fields: Iterable[_YieldFieldsClass[Any]] | None = None,
  globalns: StrMapping | None = None,
  localns: StrMapping | None = None,
  warn_name_errors: bool = False,
- head: bool = False,
- case_sensitive: bool = False,
- allow_extra: bool = False,
- ) -> Mapping[_YieldFieldsClass[Any], _T]:
- """Convert a string-mapping into a field-mapping."""
- keys_to_fields: Mapping[str, _YieldFieldsClass[Any]] = {}
- for key in mapping:
- try:
- keys_to_fields[key] = one_field(
- cls,
- key,
- fields=fields,
- globalns=globalns,
- localns=localns,
- warn_name_errors=warn_name_errors,
- head=head,
- case_sensitive=case_sensitive,
+ include: Iterable[str] | None = None,
+ exclude: Iterable[str] | None = None,
+ rel_tol: float | None = None,
+ abs_tol: float | None = None,
+ extra: Mapping[type[_U], Callable[[_U, _U], bool]] | None = None,
+ defaults: bool = False,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
+ ) -> str:
+ """Serialize a Dataclass."""
+ mapping: StrStrMapping = {}
+ fields = list(
+ yield_fields(
+ obj, globalns=globalns, localns=localns, warn_name_errors=warn_name_errors
+ )
+ )
+ for fld in fields:
+ if fld.keep(
+ include=include,
+ exclude=exclude,
+ rel_tol=rel_tol,
+ abs_tol=abs_tol,
+ extra=extra,
+ defaults=defaults,
+ ):
+ mapping[fld.name] = serialize_object(
+ fld.value, list_separator=list_separator, pair_separator=pair_separator
  )
- except OneFieldEmptyError:
- if not allow_extra:
- raise StrMappingToFieldMappingError(
- cls=cls, key=key, head=head, case_sensitive=case_sensitive
- ) from None
- return {field: mapping[key] for key, field in keys_to_fields.items()}
-
-
- @dataclass(kw_only=True, slots=True)
- class StrMappingToFieldMappingError(Exception):
- cls: type[Dataclass]
- key: str
- head: bool = False
- case_sensitive: bool = False
-
- @override
- def __str__(self) -> str:
- head = f"Dataclass {get_class_name(self.cls)!r} does not contain"
- match self.head, self.case_sensitive:
- case False, True:
- tail = f"a field {self.key!r}"
- case False, False:
- tail = f"a field {self.key!r} (modulo case)"
- case True, True:
- tail = f"any field starting with {self.key!r}"
- case True, False:
- tail = f"any field starting with {self.key!r} (modulo case)"
- case _ as never:
- assert_never(never)
- return f"{head} {tail}"
-
-
- ##
+ joined_items = (
+ join_strs(item, separator=pair_separator) for item in mapping.items()
+ )
+ return join_strs(joined_items, separator=list_separator)


- def text_to_dataclass(
+ def parse_dataclass(
  text_or_mapping: str | StrStrMapping,
  cls: type[TDataclass],
  /,
  *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  globalns: StrMapping | None = None,
  localns: StrMapping | None = None,
  warn_name_errors: bool = False,
  head: bool = False,
  case_sensitive: bool = False,
  allow_extra_keys: bool = False,
- extra_parsers: ParseTextExtra | None = None,
+ extra_parsers: ParseObjectExtra | None = None,
  ) -> TDataclass:
  """Construct a dataclass from a string or a mapping or strings."""
  match text_or_mapping:
  case str() as text:
- keys_to_serializes = _text_to_dataclass_split_text(text, cls)
+ keys_to_serializes = _parse_dataclass_split_key_value_pairs(
+ text, cls, list_separator=list_separator, pair_separator=pair_separator
+ )
  case Mapping() as keys_to_serializes:
  ...
  case _ as never:
@@ -481,7 +480,7 @@ def text_to_dataclass(
  allow_extra=allow_extra_keys,
  )
  field_names_to_values = {
- f.name: _text_to_dataclass_parse(
+ f.name: _parse_dataclass_parse_text(
  f, t, cls, head=head, case_sensitive=case_sensitive, extra=extra_parsers
  )
  for f, t in fields_to_serializes.items()
@@ -499,57 +498,82 @@ def text_to_dataclass(
  )


- def _text_to_dataclass_split_text(text: str, cls: type[TDataclass], /) -> StrStrMapping:
- pairs = (t for t in text.split(",") if t != "")
- return dict(_text_to_dataclass_split_key_value_pair(pair, cls) for pair in pairs)
-
-
- def _text_to_dataclass_split_key_value_pair(
- text: str, cls: type[Dataclass], /
- ) -> tuple[str, str]:
+ def _parse_dataclass_split_key_value_pairs(
+ text: str,
+ cls: type[TDataclass],
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
+ ) -> StrStrMapping:
  try:
- key, value = text.split("=")
- except ValueError:
- raise _TextToDataClassSplitKeyValuePairError(cls=cls, text=text) from None
- return key, value
+ return split_key_value_pairs(
+ text,
+ list_separator=list_separator,
+ pair_separator=pair_separator,
+ mapping=True,
+ )
+ except _SplitKeyValuePairsSplitError as error:
+ raise _ParseDataClassSplitKeyValuePairsSplitError(
+ text=error.inner, cls=cls
+ ) from None
+ except _SplitKeyValuePairsDuplicateKeysError as error:
+ raise _ParseDataClassSplitKeyValuePairsDuplicateKeysError(
+ text=error.text, cls=cls, counts=error.counts
+ ) from None


- def _text_to_dataclass_parse(
+ def _parse_dataclass_parse_text(
  field: _YieldFieldsClass[Any],
  text: str,
  cls: type[Dataclass],
  /,
  *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> Any:
  try:
- return parse_text(
- field.type_, text, head=head, case_sensitive=case_sensitive, extra=extra
+ return parse_object(
+ field.type_,
+ text,
+ list_separator=list_separator,
+ pair_separator=pair_separator,
+ head=head,
+ case_sensitive=case_sensitive,
+ extra=extra,
  )
- except ParseTextError:
- raise _TextToDataClassParseValueError(cls=cls, field=field, text=text) from None
+ except ParseObjectError:
+ raise _ParseDataClassParseValueError(cls=cls, field=field, text=text) from None


  @dataclass(kw_only=True, slots=True)
- class TextToDataClassError(Exception, Generic[TDataclass]):
+ class ParseDataClassError(Exception, Generic[TDataclass]):
+ text: str
  cls: type[TDataclass]


  @dataclass(kw_only=True, slots=True)
- class _TextToDataClassSplitKeyValuePairError(TextToDataClassError):
- text: str
-
+ class _ParseDataClassSplitKeyValuePairsSplitError(ParseDataClassError):
  @override
  def __str__(self) -> str:
  return f"Unable to construct {get_class_name(self.cls)!r}; failed to split key-value pair {self.text!r}"


  @dataclass(kw_only=True, slots=True)
- class _TextToDataClassParseValueError(TextToDataClassError[TDataclass]):
+ class _ParseDataClassSplitKeyValuePairsDuplicateKeysError(ParseDataClassError):
+ counts: Mapping[str, int]
+
+ @override
+ def __str__(self) -> str:
+ return f"Unable to construct {get_class_name(self.cls)!r} since there are duplicate keys; got {self.counts!r}"
+
+
+ @dataclass(kw_only=True, slots=True)
+ class _ParseDataClassParseValueError(ParseDataClassError[TDataclass]):
  field: _YieldFieldsClass[Any]
- text: str

  @override
  def __str__(self) -> str:
@@ -559,6 +583,68 @@ class _TextToDataClassParseValueError(TextToDataClassError[TDataclass]):
  ##


+ def str_mapping_to_field_mapping(
+ cls: type[TDataclass],
+ mapping: Mapping[str, _T],
+ /,
+ *,
+ fields: Iterable[_YieldFieldsClass[Any]] | None = None,
+ globalns: StrMapping | None = None,
+ localns: StrMapping | None = None,
+ warn_name_errors: bool = False,
+ head: bool = False,
+ case_sensitive: bool = False,
+ allow_extra: bool = False,
+ ) -> Mapping[_YieldFieldsClass[Any], _T]:
+ """Convert a string-mapping into a field-mapping."""
+ keys_to_fields: Mapping[str, _YieldFieldsClass[Any]] = {}
+ for key in mapping:
+ try:
+ keys_to_fields[key] = one_field(
+ cls,
+ key,
+ fields=fields,
+ globalns=globalns,
+ localns=localns,
+ warn_name_errors=warn_name_errors,
+ head=head,
+ case_sensitive=case_sensitive,
+ )
+ except OneFieldEmptyError:
+ if not allow_extra:
+ raise StrMappingToFieldMappingError(
+ cls=cls, key=key, head=head, case_sensitive=case_sensitive
+ ) from None
+ return {field: mapping[key] for key, field in keys_to_fields.items()}
+
+
+ @dataclass(kw_only=True, slots=True)
+ class StrMappingToFieldMappingError(Exception):
+ cls: type[Dataclass]
+ key: str
+ head: bool = False
+ case_sensitive: bool = False
+
+ @override
+ def __str__(self) -> str:
+ head = f"Dataclass {get_class_name(self.cls)!r} does not contain"
+ match self.head, self.case_sensitive:
+ case False, True:
+ tail = f"a field {self.key!r}"
+ case False, False:
+ tail = f"a field {self.key!r} (modulo case)"
+ case True, True:
+ tail = f"any field starting with {self.key!r}"
+ case True, False:
+ tail = f"any field starting with {self.key!r} (modulo case)"
+ case _ as never:
+ assert_never(never)
+ return f"{head} {tail}"
+
+
+ ##
+
+
  @overload
  def yield_fields(
  obj: Dataclass,
@@ -726,15 +812,15 @@ __all__ = [
  "OneFieldEmptyError",
  "OneFieldError",
  "OneFieldNonUniqueError",
+ "ParseDataClassError",
  "StrMappingToFieldMappingError",
- "TextToDataClassError",
  "YieldFieldsError",
  "dataclass_repr",
  "dataclass_to_dict",
  "mapping_to_dataclass",
  "one_field",
+ "parse_dataclass",
  "replace_non_sentinel",
  "str_mapping_to_field_mapping",
- "text_to_dataclass",
  "yield_fields",
  ]
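The net effect of the `dataclasses.py` changes: `text_to_dataclass` is renamed to `parse_dataclass` (now with configurable `list_separator`/`pair_separator` and a `ParseDataClassError` hierarchy), a new `serialize_dataclass` writes a dataclass back out as `key=value` text, and `str_mapping_to_field_mapping` moves below the new pair. A minimal round-trip sketch under those signatures; the `Config` class is illustrative, not part of the package, and exact ordering and handling of default-valued fields are not shown in this diff:

```python
from dataclasses import dataclass

from utilities.dataclasses import parse_dataclass, serialize_dataclass


@dataclass(kw_only=True)
class Config:  # hypothetical example class
    host: str = "localhost"
    port: int = 8080


# pair_separator ("=") joins each key with its value; list_separator (",") joins the pairs.
cfg = parse_dataclass("host=example.com,port=9000", Config)
assert cfg == Config(host="example.com", port=9000)

# serialize_dataclass emits the same key=value text (it is defined in the module,
# though it does not appear in the __all__ hunk above).
text = serialize_dataclass(cfg)  # e.g. "host=example.com,port=9000"
assert parse_dataclass(text, Config) == cfg
```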
utilities/parse.py CHANGED
@@ -28,7 +28,7 @@ from utilities.text import (
  split_key_value_pairs,
  split_str,
  )
- from utilities.types import Duration, Number, ParseTextExtra
+ from utilities.types import Duration, Number, ParseObjectExtra
  from utilities.typing import (
  get_args,
  is_dict_type,
@@ -47,27 +47,33 @@ if TYPE_CHECKING:
  from collections.abc import Set as AbstractSet


- def parse_text(
+ LIST_SEPARATOR = ","
+ PAIR_SEPARATOR = "="
+
+
+ def parse_object(
  type_: Any,
  text: str,
  /,
  *,
- list_separator: str = ",",
- pair_separator: str = "=",
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> Any:
  """Parse text."""
  if type_ is None:
  try:
  return parse_none(text)
  except ParseNoneError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  if isinstance(type_, type):
- return _parse_text_type(type_, text, case_sensitive=case_sensitive, extra=extra)
+ return _parse_object_type(
+ type_, text, case_sensitive=case_sensitive, extra=extra
+ )
  if is_dict_type(type_):
- return _parse_text_dict_type(
+ return _parse_object_dict_type(
  type_,
  text,
  list_separator=list_separator,
@@ -78,7 +84,7 @@ def parse_text(
  )
  if is_frozenset_type(type_):
  return frozenset(
- _parse_text_set_type(
+ _parse_object_set_type(
  type_,
  text,
  list_separator=list_separator,
@@ -89,7 +95,7 @@ def parse_text(
  )
  )
  if is_list_type(type_):
- return _parse_text_list_type(
+ return _parse_object_list_type(
  type_,
  text,
  list_separator=list_separator,
@@ -105,7 +111,7 @@ def parse_text(
  return parse_none(text)
  inner = one(arg for arg in get_args(type_) if arg is not NoneType)
  try:
- return parse_text(
+ return parse_object(
  inner,
  text,
  list_separator=list_separator,
@@ -114,10 +120,10 @@ def parse_text(
  case_sensitive=case_sensitive,
  extra=extra,
  )
- except _ParseTextParseError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ except _ParseObjectParseError:
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  if is_set_type(type_):
- return _parse_text_set_type(
+ return _parse_object_set_type(
  type_,
  text,
  list_separator=list_separator,
@@ -127,7 +133,7 @@ def parse_text(
  extra=extra,
  )
  if is_tuple_type(type_):
- return _parse_text_tuple_type(
+ return _parse_object_tuple_type(
  type_,
  text,
  list_separator=list_separator,
@@ -137,116 +143,116 @@ def parse_text(
  extra=extra,
  )
  if is_union_type(type_):
- return _parse_text_union_type(type_, text, extra=extra)
- raise _ParseTextParseError(type_=type_, text=text) from None
+ return _parse_object_union_type(type_, text, extra=extra)
+ raise _ParseObjectParseError(type_=type_, text=text) from None


- def _parse_text_type(
+ def _parse_object_type(
  cls: type[Any],
  text: str,
  /,
  *,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> Any:
  """Parse text."""
  if issubclass(cls, NoneType):
  try:
  return parse_none(text)
  except ParseNoneError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, str):
  return text
  if issubclass(cls, bool):
  try:
  return parse_bool(text)
  except ParseBoolError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if is_subclass_int_not_bool(cls):
  try:
  return int(text)
  except ValueError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, float):
  try:
  return float(text)
  except ValueError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, Enum):
  try:
  return parse_enum(text, cls, case_sensitive=case_sensitive)
  except ParseEnumError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, Path):
  return Path(text).expanduser()
  if issubclass(cls, Sentinel):
  try:
  return parse_sentinel(text)
  except ParseSentinelError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, Version):
  try:
  return parse_version(text)
  except ParseVersionError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if is_subclass_date_not_datetime(cls):
  from utilities.whenever import ParseDateError, parse_date

  try:
  return parse_date(text)
  except ParseDateError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, dt.datetime):
  from utilities.whenever import ParseDateTimeError, parse_datetime

  try:
  return parse_datetime(text)
  except ParseDateTimeError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, dt.time):
  from utilities.whenever import ParseTimeError, parse_time

  try:
  return parse_time(text)
  except ParseTimeError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if issubclass(cls, dt.timedelta):
  from utilities.whenever import ParseTimedeltaError, parse_timedelta

  try:
  return parse_timedelta(text)
  except ParseTimedeltaError:
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None
  if extra is not None:
  try:
  parser = one(p for c, p in extra.items() if issubclass(cls, c))
  except OneEmptyError:
  pass
  except OneNonUniqueError as error:
- raise _ParseTextExtraNonUniqueError(
+ raise _ParseObjectExtraNonUniqueError(
  type_=cls, text=text, first=error.first, second=error.second
  ) from None
  else:
  return parser(text)
- raise _ParseTextParseError(type_=cls, text=text) from None
+ raise _ParseObjectParseError(type_=cls, text=text) from None


- def _parse_text_dict_type(
+ def _parse_object_dict_type(
  type_: Any,
  text: str,
  /,
  *,
- list_separator: str = ",",
- pair_separator: str = "=",
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> dict[Any, Any]:
  key_type, value_type = get_args(type_)
  try:
  inner_text = extract_group(r"^{(.*)}$", text, flags=DOTALL)
  except ExtractGroupError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  pairs = split_key_value_pairs(
  inner_text,
  list_separator=list_separator,
@@ -254,7 +260,7 @@ def _parse_text_dict_type(
  mapping=True,
  )
  keys = (
- parse_text(
+ parse_object(
  key_type,
  k,
  list_separator=list_separator,
@@ -266,7 +272,7 @@ def _parse_text_dict_type(
  for k in pairs
  )
  values = (
- parse_text(
+ parse_object(
  value_type,
  v,
  list_separator=list_separator,
@@ -279,30 +285,30 @@ def _parse_text_dict_type(
  )
  try:
  return dict(zip(keys, values, strict=True))
- except _ParseTextParseError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ except _ParseObjectParseError:
+ raise _ParseObjectParseError(type_=type_, text=text) from None


- def _parse_text_list_type(
+ def _parse_object_list_type(
  type_: Any,
  text: str,
  /,
  *,
- list_separator: str = ",",
- pair_separator: str = "=",
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> list[Any]:
  inner_type = one(get_args(type_))
  try:
  inner_text = extract_group(r"^\[(.*)\]$", text, flags=DOTALL)
  except ExtractGroupError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  texts = split_str(inner_text, separator=list_separator)
  try:
  return [
- parse_text(
+ parse_object(
  inner_type,
  t,
  list_separator=list_separator,
@@ -313,30 +319,30 @@ def _parse_text_list_type(
  )
  for t in texts
  ]
- except _ParseTextParseError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ except _ParseObjectParseError:
+ raise _ParseObjectParseError(type_=type_, text=text) from None


- def _parse_text_set_type(
+ def _parse_object_set_type(
  type_: Any,
  text: str,
  /,
  *,
- list_separator: str = ",",
- pair_separator: str = "=",
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> set[Any]:
  inner_type = one(get_args(type_))
  try:
  inner_text = extract_group(r"^{(.*)}$", text, flags=DOTALL)
  except ExtractGroupError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  texts = split_str(inner_text, separator=list_separator)
  try:
  return {
- parse_text(
+ parse_object(
  inner_type,
  t,
  list_separator=list_separator,
@@ -347,25 +353,25 @@ def _parse_text_set_type(
  )
  for t in texts
  }
- except _ParseTextParseError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ except _ParseObjectParseError:
+ raise _ParseObjectParseError(type_=type_, text=text) from None


- def _parse_text_union_type(
- type_: Any, text: str, /, *, extra: ParseTextExtra | None = None
+ def _parse_object_union_type(
+ type_: Any, text: str, /, *, extra: ParseObjectExtra | None = None
  ) -> Any:
  if type_ is Number:
  try:
  return parse_number(text)
  except ParseNumberError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  if type_ is Duration:
  from utilities.whenever import ParseDurationError, parse_duration

  try:
  return parse_duration(text)
  except ParseDurationError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None
  if extra is not None:
  try:
  parser = one(p for c, p in extra.items() if c is type_)
@@ -373,31 +379,31 @@ def _parse_text_union_type(
  pass
  else:
  return parser(text)
- raise _ParseTextParseError(type_=type_, text=text) from None
+ raise _ParseObjectParseError(type_=type_, text=text) from None


- def _parse_text_tuple_type(
+ def _parse_object_tuple_type(
  type_: Any,
  text: str,
  /,
  *,
- list_separator: str = ",",
- pair_separator: str = "=",
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  head: bool = False,
  case_sensitive: bool = False,
- extra: ParseTextExtra | None = None,
+ extra: ParseObjectExtra | None = None,
  ) -> tuple[Any, ...]:
  args = get_args(type_)
  try:
  inner = extract_group(r"^\((.*)\)$", text, flags=DOTALL)
  except ExtractGroupError:
- raise _ParseTextParseError(type_=type_, text=text) from None
- texts = inner.split(",")
+ raise _ParseObjectParseError(type_=type_, text=text) from None
+ texts = inner.split(list_separator)
  if len(args) != len(texts):
- raise _ParseTextParseError(type_=type_, text=text)
+ raise _ParseObjectParseError(type_=type_, text=text)
  try:
  return tuple(
- parse_text(
+ parse_object(
  arg,
  text,
  list_separator=list_separator,
@@ -408,25 +414,25 @@ def _parse_text_tuple_type(
  )
  for arg, text in zip(args, texts, strict=True)
  )
- except _ParseTextParseError:
- raise _ParseTextParseError(type_=type_, text=text) from None
+ except _ParseObjectParseError:
+ raise _ParseObjectParseError(type_=type_, text=text) from None


  @dataclass
- class ParseTextError(Exception):
+ class ParseObjectError(Exception):
  type_: Any
  text: str


  @dataclass
- class _ParseTextParseError(ParseTextError):
+ class _ParseObjectParseError(ParseObjectError):
  @override
  def __str__(self) -> str:
  return f"Unable to parse {self.type_!r}; got {self.text!r}"


  @dataclass
- class _ParseTextExtraNonUniqueError(ParseTextError):
+ class _ParseObjectExtraNonUniqueError(ParseObjectError):
  first: type[Any]
  second: type[Any]
@@ -438,8 +444,12 @@ class _ParseTextExtraNonUniqueError(ParseTextError):
  ##


- def to_text(
- obj: Any, /, *, list_separator: str = ",", pair_separator: str = "="
+ def serialize_object(
+ obj: Any,
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  ) -> str:
  """Convert an object to text."""
  if (obj is None) or isinstance(
@@ -465,33 +475,41 @@ def to_text(
  if isinstance(obj, Enum):
  return obj.name
  if isinstance(obj, dict):
- return _to_text_dict(
+ return _serialize_object_dict(
  obj, list_separator=list_separator, pair_separator=pair_separator
  )
  if isinstance(obj, list):
- return _to_text_list(
+ return _serialize_object_list(
  obj, list_separator=list_separator, pair_separator=pair_separator
  )
  if isinstance(obj, tuple):
- return _to_text_tuple(
+ return _serialize_object_tuple(
  obj, list_separator=list_separator, pair_separator=pair_separator
  )
  if isinstance(obj, set | frozenset):
- return _to_text_set(
+ return _serialize_object_set(
  obj, list_separator=list_separator, pair_separator=pair_separator
  )
  raise NotImplementedError(obj)


- def _to_text_dict(
- obj: Mapping[Any, Any], /, *, list_separator: str = ",", pair_separator: str = "="
+ def _serialize_object_dict(
+ obj: Mapping[Any, Any],
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  ) -> str:
  keys = (
- to_text(k, list_separator=list_separator, pair_separator=pair_separator)
+ serialize_object(
+ k, list_separator=list_separator, pair_separator=pair_separator
+ )
  for k in obj
  )
  values = (
- to_text(v, list_separator=list_separator, pair_separator=pair_separator)
+ serialize_object(
+ v, list_separator=list_separator, pair_separator=pair_separator
+ )
  for v in obj.values()
  )
  items = zip(keys, values, strict=True)
@@ -500,37 +518,55 @@ def _to_text_dict(
  return f"{{{joined}}}"


- def _to_text_list(
- obj: Sequence[Any], /, *, list_separator: str = ",", pair_separator: str = "="
+ def _serialize_object_list(
+ obj: Sequence[Any],
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  ) -> str:
  items = (
- to_text(i, list_separator=list_separator, pair_separator=pair_separator)
+ serialize_object(
+ i, list_separator=list_separator, pair_separator=pair_separator
+ )
  for i in obj
  )
  joined = join_strs(items, separator=list_separator)
  return f"[{joined}]"


- def _to_text_set(
- obj: AbstractSet[Any], /, *, list_separator: str = ",", pair_separator: str = "="
+ def _serialize_object_set(
+ obj: AbstractSet[Any],
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  ) -> str:
  items = (
- to_text(i, list_separator=list_separator, pair_separator=pair_separator)
+ serialize_object(
+ i, list_separator=list_separator, pair_separator=pair_separator
+ )
  for i in obj
  )
  joined = join_strs(items, sort=True, separator=list_separator)
  return f"{{{joined}}}"


- def _to_text_tuple(
- obj: tuple[Any, ...], /, *, list_separator: str = ",", pair_separator: str = "="
+ def _serialize_object_tuple(
+ obj: tuple[Any, ...],
+ /,
+ *,
+ list_separator: str = LIST_SEPARATOR,
+ pair_separator: str = PAIR_SEPARATOR,
  ) -> str:
  items = (
- to_text(i, list_separator=list_separator, pair_separator=pair_separator)
+ serialize_object(
+ i, list_separator=list_separator, pair_separator=pair_separator
+ )
  for i in obj
  )
  joined = join_strs(items, separator=list_separator)
  return f"({joined})"


- __all__ = ["parse_text"]
+ __all__ = ["LIST_SEPARATOR", "PAIR_SEPARATOR", "parse_object", "serialize_object"]
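In `parse.py`, `parse_text`/`to_text` become `parse_object`/`serialize_object`, the default separators are promoted to the module-level `LIST_SEPARATOR` (",") and `PAIR_SEPARATOR` ("=") constants, and the exceptions are renamed to the `ParseObjectError` family. A brief sketch of the renamed API, using only behaviour visible in the hunks above (the container delimiters come from the `extract_group` patterns):

```python
from utilities.parse import LIST_SEPARATOR, PAIR_SEPARATOR, parse_object, serialize_object

assert (LIST_SEPARATOR, PAIR_SEPARATOR) == (",", "=")

# Scalars are parsed according to the requested type.
assert parse_object(int, "42") == 42

# Containers use the delimiters from the regexes above: [..] lists, {..} dicts/sets, (..) tuples.
assert parse_object(list[int], "[1,2,3]") == [1, 2, 3]
assert parse_object(dict[str, int], "{a=1,b=2}") == {"a": 1, "b": 2}

# serialize_object is the reverse direction, emitting the same separators.
assert serialize_object([1, 2, 3]) == "[1,2,3]"
```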
utilities/python_dotenv.py CHANGED
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, override

  from dotenv import dotenv_values

- from utilities.dataclasses import MappingToDataclassError, text_to_dataclass
+ from utilities.dataclasses import MappingToDataclassError, parse_dataclass
  from utilities.git import get_repo_root
  from utilities.iterables import MergeStrMappingsError, merge_str_mappings
  from utilities.pathlib import PWD
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
  from collections.abc import Set as AbstractSet
  from pathlib import Path

- from utilities.types import ParseTextExtra, PathLike, StrMapping, TDataclass
+ from utilities.types import ParseObjectExtra, PathLike, StrMapping, TDataclass


  def load_settings(
@@ -30,7 +30,7 @@ def load_settings(
  warn_name_errors: bool = False,
  head: bool = False,
  case_sensitive: bool = False,
- extra_parsers: ParseTextExtra | None = None,
+ extra_parsers: ParseObjectExtra | None = None,
  ) -> TDataclass:
  """Load a set of settings from the `.env` file."""
  path = get_repo_root(cwd=cwd).joinpath(".env")
@@ -50,7 +50,7 @@ def load_settings(
  ) from None
  values = {k: v for k, v in maybe_values.items() if v is not None}
  try:
- return text_to_dataclass(
+ return parse_dataclass(
  values,
  cls,
  globalns=globalns,
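The `python_dotenv.py` hunks only swap the backend: `load_settings` now feeds the merged `.env` values into `parse_dataclass`, and its `extra_parsers` hook is typed as `ParseObjectExtra`. A hedged sketch of a call, assuming a hypothetical `Settings` dataclass whose field names match keys in the repository's `.env` file:

```python
from dataclasses import dataclass

from utilities.python_dotenv import load_settings


@dataclass(kw_only=True)
class Settings:  # hypothetical; keys must exist in <repo root>/.env
    api_url: str
    timeout: int = 30


# Reads <repo root>/.env via dotenv_values, merges the mappings, then builds the
# dataclass with parse_dataclass (key matching is case-insensitive by default).
settings = load_settings(Settings)
print(settings.api_url, settings.timeout)
```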
utilities/types.py CHANGED
@@ -231,7 +231,7 @@ class SupportsRound(Protocol[_T_co]):


  # parse
- type ParseTextExtra = Mapping[Any, Callable[[str], Any]]
+ type ParseObjectExtra = Mapping[Any, Callable[[str], Any]]


  # pathlib
@@ -283,7 +283,7 @@ __all__ = [
  "OpenMode",
  "OptExcInfo",
  "Parallelism",
- "ParseTextExtra",
+ "ParseObjectExtra",
  "PathLike",
  "PathLikeOrCallable",
  "RoundMode",