dycw-utilities 0.109.28__py3-none-any.whl → 0.110.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only; it reflects the changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dycw-utilities
3
- Version: 0.109.28
3
+ Version: 0.110.0
4
4
  Author-email: Derek Wan <d.wan@icloud.com>
5
5
  License-File: LICENSE
6
6
  Requires-Python: >=3.12
@@ -1,4 +1,4 @@
1
- utilities/__init__.py,sha256=thnWMb1VwaXUYJiwR7uCyVcy6rTge0ODibDsllPsk3Q,61
1
+ utilities/__init__.py,sha256=H5Ma2FOh3z_dBcZTVaSY2VI14JUFHWspBQs7dYAHnjc,60
2
2
  utilities/altair.py,sha256=Gpja-flOo-Db0PIPJLJsgzAlXWoKUjPU1qY-DQ829ek,9156
3
3
  utilities/astor.py,sha256=xuDUkjq0-b6fhtwjhbnebzbqQZAjMSHR1IIS5uOodVg,777
4
4
  utilities/asyncio.py,sha256=41oQUurWMvadFK5gFnaG21hMM0Vmfn2WS6OpC0R9mas,14757
@@ -11,7 +11,7 @@ utilities/contextlib.py,sha256=OOIIEa5lXKGzFAnauaul40nlQnQko6Na4ryiMJcHkIg,478
11
11
  utilities/contextvars.py,sha256=RsSGGrbQqqZ67rOydnM7WWIsM2lIE31UHJLejnHJPWY,505
12
12
  utilities/cryptography.py,sha256=HyOewI20cl3uRXsKivhIaeLVDInQdzgXZGaly7hS5dE,771
13
13
  utilities/cvxpy.py,sha256=Rv1-fD-XYerosCavRF8Pohop2DBkU3AlFaGTfD8AEAA,13776
14
- utilities/dataclasses.py,sha256=8-38WHrScAvElBNvFxBnhJwab1XXkSXpDOiNPOAvh2Q,23295
14
+ utilities/dataclasses.py,sha256=vkrcV-qnMS4qGHJ2z3kmcugNH_Fk20SPXfttXb3H5Bs,25950
15
15
  utilities/datetime.py,sha256=GOs-MIEW_A49kzqa1yhIoeNeSqqPVgGO-h2AThtgTDk,37326
16
16
  utilities/enum.py,sha256=HoRwVCWzsnH0vpO9ZEcAAIZLMv0Sn2vJxxA4sYMQgDs,5793
17
17
  utilities/errors.py,sha256=BtSNP0JC3ik536ddPyTerLomCRJV9f6kdMe6POz0QHM,361
@@ -41,7 +41,7 @@ utilities/operator.py,sha256=0M2yZJ0PODH47ogFEnkGMBe_cfxwZR02T_92LZVZvHo,3715
41
41
  utilities/optuna.py,sha256=loyJGWTzljgdJaoLhP09PT8Jz6o_pwBOwehY33lHkhw,1923
42
42
  utilities/orjson.py,sha256=Wj5pzG_VdgoAy14a7Luhem-BgYrRtRFvvl_POiszRd0,36930
43
43
  utilities/os.py,sha256=D_FyyT-6TtqiN9KSS7c9g1fnUtgxmyMtzAjmYLkk46A,3587
44
- utilities/parse.py,sha256=D1rqqrULV1FkjnCZS6804Io26_AZbMrsEILf-pZGNzw,7192
44
+ utilities/parse.py,sha256=wxqh4YwBfQ7nm249-F_6uqiLo1js9_xji9AVvUxZ5nI,17091
45
45
  utilities/pathlib.py,sha256=31WPMXdLIyXgYOMMl_HOI2wlo66MGSE-cgeelk-Lias,1410
46
46
  utilities/period.py,sha256=ikHXsWtDLr553cfH6p9mMaiCnIAP69B7q84ckWV3HaA,10884
47
47
  utilities/pickle.py,sha256=Bhvd7cZl-zQKQDFjUerqGuSKlHvnW1K2QXeU5UZibtg,657
@@ -55,7 +55,7 @@ utilities/pyinstrument.py,sha256=ROq2txPwbe2ZUuYJ2IDNbfT97lu2ca0v5_C_yn6sSlM,800
55
55
  utilities/pyrsistent.py,sha256=TLJfiiKO4cKNU_pCoM3zDqmSM421qpuoaeaBNnyC_Ac,2489
56
56
  utilities/pytest.py,sha256=85QUax4g2VBBAqAHtM9wekcSLB7_9O8AKFTaCshztL8,7989
57
57
  utilities/pytest_regressions.py,sha256=-SVT9647Dg6-JcdsiaDKXe3NdOmmrvGevLKWwGjxq3c,5088
58
- utilities/python_dotenv.py,sha256=JDIGuaGIiVkOcOBDpA7OZZM_0vxrkZyrrfqOssB6cqE,3170
58
+ utilities/python_dotenv.py,sha256=6viKAI7zx9YQU2ewITaOcad7wMwkrf6FbYpBmCl4vCA,3170
59
59
  utilities/random.py,sha256=lYdjgxB7GCfU_fwFVl5U-BIM_HV3q6_urL9byjrwDM8,4157
60
60
  utilities/re.py,sha256=5J4d8VwIPFVrX2Eb8zfoxImDv7IwiN_U7mJ07wR2Wvs,3958
61
61
  utilities/redis.py,sha256=CsDQqc9V6ASLzLQwtbQXZQEndyG9pJiCOhPlPeszt7Y,21203
@@ -77,7 +77,7 @@ utilities/text.py,sha256=Ax_n-nY80_onWxag9M0PkmbaAqwyut9AEA9tEMd5lBs,6694
77
77
  utilities/threading.py,sha256=GvBOp4CyhHfN90wGXZuA2VKe9fGzMaEa7oCl4f3nnPU,1009
78
78
  utilities/timer.py,sha256=Rkc49KSpHuC8s7vUxGO9DU55U9I6yDKnchsQqrUCVBs,4075
79
79
  utilities/traceback.py,sha256=KwHPLdEbdj0fFhXo8MBfxcvem8A-VXYDwFMNJ6f0cTM,27328
80
- utilities/types.py,sha256=z1hbBOT5TkzTn2JOvSldw6DScxi3erG9qpJ3xci66GI,17963
80
+ utilities/types.py,sha256=Hi9aKaxN3G9zFVlLjx6U9xd_HMGq-eqHLxmG1cSdVpg,17967
81
81
  utilities/typing.py,sha256=gLg4EbE1FX52fJ1d3ji4i08qolwu9qgWt8w_w_Y5DTk,5512
82
82
  utilities/tzdata.py,sha256=2ZsPmhTVM9Ptrxb4QrWKtKOB9RiH8IOO-A1u7ULdVbg,176
83
83
  utilities/tzlocal.py,sha256=42BCquGF54oIqIKe5RGziP4K8Nbm3Ey7uqcNn6m5ge8,534
@@ -87,7 +87,7 @@ utilities/warnings.py,sha256=yUgjnmkCRf6QhdyAXzl7u0qQFejhQG3PrjoSwxpbHrs,1819
87
87
  utilities/whenever.py,sha256=TjoTAJ1R27-rKXiXzdE4GzPidmYqm0W58XydDXp-QZM,17786
88
88
  utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
89
89
  utilities/zoneinfo.py,sha256=-DQz5a0Ikw9jfSZtL0BEQkXOMC9yGn_xiJYNCLMiqEc,1989
90
- dycw_utilities-0.109.28.dist-info/METADATA,sha256=_wZaTQxzNcdICCzqa8Eg769N4iPa9Ho9XWHP-LLUMgw,13005
91
- dycw_utilities-0.109.28.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
92
- dycw_utilities-0.109.28.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
93
- dycw_utilities-0.109.28.dist-info/RECORD,,
90
+ dycw_utilities-0.110.0.dist-info/METADATA,sha256=z9nJyYO-djaRWCX7iT7S0vdy40gXfUedyu_jhw3O-Cs,13004
91
+ dycw_utilities-0.110.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
92
+ dycw_utilities-0.110.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
93
+ dycw_utilities-0.110.0.dist-info/RECORD,,
utilities/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  from __future__ import annotations
2
2
 
3
- __version__ = "0.109.28"
3
+ __version__ = "0.110.0"
utilities/dataclasses.py CHANGED
@@ -1,7 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from collections.abc import Mapping
4
- from collections.abc import Set as AbstractSet
5
4
  from dataclasses import MISSING, dataclass, field, fields, replace
6
5
  from typing import (
7
6
  TYPE_CHECKING,
@@ -22,13 +21,26 @@ from utilities.functions import (
22
21
  )
23
22
  from utilities.iterables import OneStrEmptyError, OneStrNonUniqueError, one_str
24
23
  from utilities.operator import is_equal
25
- from utilities.parse import ParseTextError, parse_text
24
+ from utilities.parse import (
25
+ LIST_SEPARATOR,
26
+ PAIR_SEPARATOR,
27
+ ParseObjectError,
28
+ parse_object,
29
+ serialize_object,
30
+ )
26
31
  from utilities.sentinel import Sentinel, sentinel
27
- from utilities.types import ParseTextExtra, StrStrMapping, TDataclass
32
+ from utilities.text import (
33
+ _SplitKeyValuePairsDuplicateKeysError,
34
+ _SplitKeyValuePairsSplitError,
35
+ join_strs,
36
+ split_key_value_pairs,
37
+ )
38
+ from utilities.types import ParseObjectExtra, StrStrMapping, TDataclass
28
39
  from utilities.typing import get_type_hints
29
40
 
30
41
  if TYPE_CHECKING:
31
42
  from collections.abc import Callable, Iterable, Iterator
43
+ from collections.abc import Set as AbstractSet
32
44
 
33
45
  from utilities.types import Dataclass, StrMapping
34
46
 
@@ -44,11 +56,11 @@ def dataclass_repr(
44
56
  obj: Dataclass,
45
57
  /,
46
58
  *,
47
- include: Iterable[str] | None = None,
48
- exclude: Iterable[str] | None = None,
49
59
  globalns: StrMapping | None = None,
50
60
  localns: StrMapping | None = None,
51
61
  warn_name_errors: bool = False,
62
+ include: Iterable[str] | None = None,
63
+ exclude: Iterable[str] | None = None,
52
64
  rel_tol: float | None = None,
53
65
  abs_tol: float | None = None,
54
66
  extra: Mapping[type[_T], Callable[[_T, _T], bool]] | None = None,
@@ -75,11 +87,11 @@ def dataclass_repr(
75
87
  if is_dataclass_instance(fld.value):
76
88
  repr_ = dataclass_repr(
77
89
  fld.value,
78
- include=include,
79
- exclude=exclude,
80
90
  globalns=globalns,
81
91
  localns=localns,
82
92
  warn_name_errors=warn_name_errors,
93
+ include=include,
94
+ exclude=exclude,
83
95
  rel_tol=rel_tol,
84
96
  abs_tol=abs_tol,
85
97
  extra=extra,
@@ -90,11 +102,11 @@ def dataclass_repr(
90
102
  repr_ = [
91
103
  dataclass_repr(
92
104
  v,
93
- include=include,
94
- exclude=exclude,
95
105
  globalns=globalns,
96
106
  localns=localns,
97
107
  warn_name_errors=warn_name_errors,
108
+ include=include,
109
+ exclude=exclude,
98
110
  rel_tol=rel_tol,
99
111
  abs_tol=abs_tol,
100
112
  extra=extra,
@@ -123,11 +135,11 @@ def dataclass_to_dict(
123
135
  obj: Dataclass,
124
136
  /,
125
137
  *,
126
- include: Iterable[str] | None = None,
127
- exclude: Iterable[str] | None = None,
128
138
  globalns: StrMapping | None = None,
129
139
  localns: StrMapping | None = None,
130
140
  warn_name_errors: bool = False,
141
+ include: Iterable[str] | None = None,
142
+ exclude: Iterable[str] | None = None,
131
143
  rel_tol: float | None = None,
132
144
  abs_tol: float | None = None,
133
145
  extra: Mapping[type[_T], Callable[[_T, _T], bool]] | None = None,
@@ -155,6 +167,8 @@ def dataclass_to_dict(
155
167
  globalns=globalns,
156
168
  localns=localns,
157
169
  warn_name_errors=warn_name_errors,
170
+ include=include,
171
+ exclude=exclude,
158
172
  rel_tol=rel_tol,
159
173
  abs_tol=abs_tol,
160
174
  extra=extra,
@@ -169,6 +183,8 @@ def dataclass_to_dict(
169
183
  globalns=globalns,
170
184
  localns=localns,
171
185
  warn_name_errors=warn_name_errors,
186
+ include=include,
187
+ exclude=exclude,
172
188
  rel_tol=rel_tol,
173
189
  abs_tol=abs_tol,
174
190
  extra=extra,
@@ -381,85 +397,68 @@ def replace_non_sentinel(
381
397
  ##
382
398
 
383
399
 
384
- def str_mapping_to_field_mapping(
385
- cls: type[TDataclass],
386
- mapping: Mapping[str, _T],
400
+ def serialize_dataclass(
401
+ obj: Dataclass,
387
402
  /,
388
403
  *,
389
- fields: Iterable[_YieldFieldsClass[Any]] | None = None,
390
404
  globalns: StrMapping | None = None,
391
405
  localns: StrMapping | None = None,
392
406
  warn_name_errors: bool = False,
393
- head: bool = False,
394
- case_sensitive: bool = False,
395
- allow_extra: bool = False,
396
- ) -> Mapping[_YieldFieldsClass[Any], _T]:
397
- """Convert a string-mapping into a field-mapping."""
398
- keys_to_fields: Mapping[str, _YieldFieldsClass[Any]] = {}
399
- for key in mapping:
400
- try:
401
- keys_to_fields[key] = one_field(
402
- cls,
403
- key,
404
- fields=fields,
405
- globalns=globalns,
406
- localns=localns,
407
- warn_name_errors=warn_name_errors,
408
- head=head,
409
- case_sensitive=case_sensitive,
407
+ include: Iterable[str] | None = None,
408
+ exclude: Iterable[str] | None = None,
409
+ rel_tol: float | None = None,
410
+ abs_tol: float | None = None,
411
+ extra: Mapping[type[_U], Callable[[_U, _U], bool]] | None = None,
412
+ defaults: bool = False,
413
+ list_separator: str = LIST_SEPARATOR,
414
+ pair_separator: str = PAIR_SEPARATOR,
415
+ ) -> str:
416
+ """Serialize a Dataclass."""
417
+ mapping: StrStrMapping = {}
418
+ fields = list(
419
+ yield_fields(
420
+ obj, globalns=globalns, localns=localns, warn_name_errors=warn_name_errors
421
+ )
422
+ )
423
+ for fld in fields:
424
+ if fld.keep(
425
+ include=include,
426
+ exclude=exclude,
427
+ rel_tol=rel_tol,
428
+ abs_tol=abs_tol,
429
+ extra=extra,
430
+ defaults=defaults,
431
+ ):
432
+ mapping[fld.name] = serialize_object(
433
+ fld.value, list_separator=list_separator, pair_separator=pair_separator
410
434
  )
411
- except OneFieldEmptyError:
412
- if not allow_extra:
413
- raise StrMappingToFieldMappingError(
414
- cls=cls, key=key, head=head, case_sensitive=case_sensitive
415
- ) from None
416
- return {field: mapping[key] for key, field in keys_to_fields.items()}
417
-
418
-
419
- @dataclass(kw_only=True, slots=True)
420
- class StrMappingToFieldMappingError(Exception):
421
- cls: type[Dataclass]
422
- key: str
423
- head: bool = False
424
- case_sensitive: bool = False
425
-
426
- @override
427
- def __str__(self) -> str:
428
- head = f"Dataclass {get_class_name(self.cls)!r} does not contain"
429
- match self.head, self.case_sensitive:
430
- case False, True:
431
- tail = f"a field {self.key!r}"
432
- case False, False:
433
- tail = f"a field {self.key!r} (modulo case)"
434
- case True, True:
435
- tail = f"any field starting with {self.key!r}"
436
- case True, False:
437
- tail = f"any field starting with {self.key!r} (modulo case)"
438
- case _ as never:
439
- assert_never(never)
440
- return f"{head} {tail}"
441
-
442
-
443
- ##
435
+ joined_items = (
436
+ join_strs(item, separator=pair_separator) for item in mapping.items()
437
+ )
438
+ return join_strs(joined_items, separator=list_separator)
444
439
 
445
440
 
446
- def text_to_dataclass(
447
- text_or_mapping: str | StrStrMapping,
441
+ def parse_dataclass(
442
+ text_or_mapping: str | Mapping[str, str],
448
443
  cls: type[TDataclass],
449
444
  /,
450
445
  *,
446
+ list_separator: str = LIST_SEPARATOR,
447
+ pair_separator: str = PAIR_SEPARATOR,
451
448
  globalns: StrMapping | None = None,
452
449
  localns: StrMapping | None = None,
453
450
  warn_name_errors: bool = False,
454
451
  head: bool = False,
455
452
  case_sensitive: bool = False,
456
453
  allow_extra_keys: bool = False,
457
- extra_parsers: ParseTextExtra | None = None,
454
+ extra_parsers: ParseObjectExtra | None = None,
458
455
  ) -> TDataclass:
459
456
  """Construct a dataclass from a string or a mapping or strings."""
460
457
  match text_or_mapping:
461
458
  case str() as text:
462
- keys_to_serializes = _text_to_dataclass_split_text(text, cls)
459
+ keys_to_serializes = _parse_dataclass_split_key_value_pairs(
460
+ text, cls, list_separator=list_separator, pair_separator=pair_separator
461
+ )
463
462
  case Mapping() as keys_to_serializes:
464
463
  ...
465
464
  case _ as never:
@@ -481,7 +480,7 @@ def text_to_dataclass(
481
480
  allow_extra=allow_extra_keys,
482
481
  )
483
482
  field_names_to_values = {
484
- f.name: _text_to_dataclass_parse(
483
+ f.name: _parse_dataclass_parse_text(
485
484
  f, t, cls, head=head, case_sensitive=case_sensitive, extra=extra_parsers
486
485
  )
487
486
  for f, t in fields_to_serializes.items()
@@ -499,57 +498,82 @@ def text_to_dataclass(
499
498
  )
500
499
 
501
500
 
502
- def _text_to_dataclass_split_text(text: str, cls: type[TDataclass], /) -> StrStrMapping:
503
- pairs = (t for t in text.split(",") if t != "")
504
- return dict(_text_to_dataclass_split_key_value_pair(pair, cls) for pair in pairs)
505
-
506
-
507
- def _text_to_dataclass_split_key_value_pair(
508
- text: str, cls: type[Dataclass], /
509
- ) -> tuple[str, str]:
501
+ def _parse_dataclass_split_key_value_pairs(
502
+ text: str,
503
+ cls: type[TDataclass],
504
+ /,
505
+ *,
506
+ list_separator: str = LIST_SEPARATOR,
507
+ pair_separator: str = PAIR_SEPARATOR,
508
+ ) -> Mapping[str, str]:
510
509
  try:
511
- key, value = text.split("=")
512
- except ValueError:
513
- raise _TextToDataClassSplitKeyValuePairError(cls=cls, text=text) from None
514
- return key, value
510
+ return split_key_value_pairs(
511
+ text,
512
+ list_separator=list_separator,
513
+ pair_separator=pair_separator,
514
+ mapping=True,
515
+ )
516
+ except _SplitKeyValuePairsSplitError as error:
517
+ raise _ParseDataClassSplitKeyValuePairsSplitError(
518
+ text=error.inner, cls=cls
519
+ ) from None
520
+ except _SplitKeyValuePairsDuplicateKeysError as error:
521
+ raise _ParseDataClassSplitKeyValuePairsDuplicateKeysError(
522
+ text=error.text, cls=cls, counts=error.counts
523
+ ) from None
515
524
 
516
525
 
517
- def _text_to_dataclass_parse(
526
+ def _parse_dataclass_parse_text(
518
527
  field: _YieldFieldsClass[Any],
519
528
  text: str,
520
529
  cls: type[Dataclass],
521
530
  /,
522
531
  *,
532
+ list_separator: str = LIST_SEPARATOR,
533
+ pair_separator: str = PAIR_SEPARATOR,
523
534
  head: bool = False,
524
535
  case_sensitive: bool = False,
525
- extra: ParseTextExtra | None = None,
536
+ extra: ParseObjectExtra | None = None,
526
537
  ) -> Any:
527
538
  try:
528
- return parse_text(
529
- field.type_, text, head=head, case_sensitive=case_sensitive, extra=extra
539
+ return parse_object(
540
+ field.type_,
541
+ text,
542
+ list_separator=list_separator,
543
+ pair_separator=pair_separator,
544
+ head=head,
545
+ case_sensitive=case_sensitive,
546
+ extra=extra,
530
547
  )
531
- except ParseTextError:
532
- raise _TextToDataClassParseValueError(cls=cls, field=field, text=text) from None
548
+ except ParseObjectError:
549
+ raise _ParseDataClassParseValueError(cls=cls, field=field, text=text) from None
533
550
 
534
551
 
535
552
  @dataclass(kw_only=True, slots=True)
536
- class TextToDataClassError(Exception, Generic[TDataclass]):
553
+ class ParseDataClassError(Exception, Generic[TDataclass]):
554
+ text: str
537
555
  cls: type[TDataclass]
538
556
 
539
557
 
540
558
  @dataclass(kw_only=True, slots=True)
541
- class _TextToDataClassSplitKeyValuePairError(TextToDataClassError):
542
- text: str
543
-
559
+ class _ParseDataClassSplitKeyValuePairsSplitError(ParseDataClassError):
544
560
  @override
545
561
  def __str__(self) -> str:
546
562
  return f"Unable to construct {get_class_name(self.cls)!r}; failed to split key-value pair {self.text!r}"
547
563
 
548
564
 
549
565
  @dataclass(kw_only=True, slots=True)
550
- class _TextToDataClassParseValueError(TextToDataClassError[TDataclass]):
566
+ class _ParseDataClassSplitKeyValuePairsDuplicateKeysError(ParseDataClassError):
567
+ counts: Mapping[str, int]
568
+
569
+ @override
570
+ def __str__(self) -> str:
571
+ return f"Unable to construct {get_class_name(self.cls)!r} since there are duplicate keys; got {self.counts!r}"
572
+
573
+
574
+ @dataclass(kw_only=True, slots=True)
575
+ class _ParseDataClassParseValueError(ParseDataClassError[TDataclass]):
551
576
  field: _YieldFieldsClass[Any]
552
- text: str
553
577
 
554
578
  @override
555
579
  def __str__(self) -> str:
@@ -559,6 +583,68 @@ class _TextToDataClassParseValueError(TextToDataClassError[TDataclass]):
559
583
  ##
560
584
 
561
585
 
586
+ def str_mapping_to_field_mapping(
587
+ cls: type[TDataclass],
588
+ mapping: Mapping[str, _T],
589
+ /,
590
+ *,
591
+ fields: Iterable[_YieldFieldsClass[Any]] | None = None,
592
+ globalns: StrMapping | None = None,
593
+ localns: StrMapping | None = None,
594
+ warn_name_errors: bool = False,
595
+ head: bool = False,
596
+ case_sensitive: bool = False,
597
+ allow_extra: bool = False,
598
+ ) -> Mapping[_YieldFieldsClass[Any], _T]:
599
+ """Convert a string-mapping into a field-mapping."""
600
+ keys_to_fields: Mapping[str, _YieldFieldsClass[Any]] = {}
601
+ for key in mapping:
602
+ try:
603
+ keys_to_fields[key] = one_field(
604
+ cls,
605
+ key,
606
+ fields=fields,
607
+ globalns=globalns,
608
+ localns=localns,
609
+ warn_name_errors=warn_name_errors,
610
+ head=head,
611
+ case_sensitive=case_sensitive,
612
+ )
613
+ except OneFieldEmptyError:
614
+ if not allow_extra:
615
+ raise StrMappingToFieldMappingError(
616
+ cls=cls, key=key, head=head, case_sensitive=case_sensitive
617
+ ) from None
618
+ return {field: mapping[key] for key, field in keys_to_fields.items()}
619
+
620
+
621
+ @dataclass(kw_only=True, slots=True)
622
+ class StrMappingToFieldMappingError(Exception):
623
+ cls: type[Dataclass]
624
+ key: str
625
+ head: bool = False
626
+ case_sensitive: bool = False
627
+
628
+ @override
629
+ def __str__(self) -> str:
630
+ head = f"Dataclass {get_class_name(self.cls)!r} does not contain"
631
+ match self.head, self.case_sensitive:
632
+ case False, True:
633
+ tail = f"a field {self.key!r}"
634
+ case False, False:
635
+ tail = f"a field {self.key!r} (modulo case)"
636
+ case True, True:
637
+ tail = f"any field starting with {self.key!r}"
638
+ case True, False:
639
+ tail = f"any field starting with {self.key!r} (modulo case)"
640
+ case _ as never:
641
+ assert_never(never)
642
+ return f"{head} {tail}"
643
+
644
+
645
+ ##
646
+
647
+
562
648
  @overload
563
649
  def yield_fields(
564
650
  obj: Dataclass,
@@ -726,15 +812,15 @@ __all__ = [
726
812
  "OneFieldEmptyError",
727
813
  "OneFieldError",
728
814
  "OneFieldNonUniqueError",
815
+ "ParseDataClassError",
729
816
  "StrMappingToFieldMappingError",
730
- "TextToDataClassError",
731
817
  "YieldFieldsError",
732
818
  "dataclass_repr",
733
819
  "dataclass_to_dict",
734
820
  "mapping_to_dataclass",
735
821
  "one_field",
822
+ "parse_dataclass",
736
823
  "replace_non_sentinel",
737
824
  "str_mapping_to_field_mapping",
738
- "text_to_dataclass",
739
825
  "yield_fields",
740
826
  ]
utilities/parse.py CHANGED
@@ -7,206 +7,566 @@ from enum import Enum
7
7
  from pathlib import Path
8
8
  from re import DOTALL
9
9
  from types import NoneType
10
- from typing import Any, override
10
+ from typing import TYPE_CHECKING, Any, override
11
11
 
12
- from utilities.datetime import is_subclass_date_not_datetime
12
+ from utilities.datetime import (
13
+ is_instance_date_not_datetime,
14
+ is_subclass_date_not_datetime,
15
+ )
13
16
  from utilities.enum import ParseEnumError, parse_enum
14
17
  from utilities.functions import is_subclass_int_not_bool
15
18
  from utilities.iterables import OneEmptyError, OneNonUniqueError, one, one_str
16
19
  from utilities.math import ParseNumberError, parse_number
17
20
  from utilities.re import ExtractGroupError, extract_group
18
21
  from utilities.sentinel import ParseSentinelError, Sentinel, parse_sentinel
19
- from utilities.text import ParseBoolError, ParseNoneError, parse_bool, parse_none
20
- from utilities.types import Duration, Number, ParseTextExtra
22
+ from utilities.text import (
23
+ ParseBoolError,
24
+ ParseNoneError,
25
+ join_strs,
26
+ parse_bool,
27
+ parse_none,
28
+ split_key_value_pairs,
29
+ split_str,
30
+ )
31
+ from utilities.types import Duration, Number, ParseObjectExtra
21
32
  from utilities.typing import (
22
33
  get_args,
34
+ is_dict_type,
35
+ is_frozenset_type,
36
+ is_list_type,
23
37
  is_literal_type,
24
38
  is_optional_type,
39
+ is_set_type,
25
40
  is_tuple_type,
26
41
  is_union_type,
27
42
  )
28
43
  from utilities.version import ParseVersionError, Version, parse_version
29
44
 
45
+ if TYPE_CHECKING:
46
+ from collections.abc import Mapping, Sequence
47
+ from collections.abc import Set as AbstractSet
30
48
 
31
- def parse_text(
32
- obj: Any,
49
+
50
+ LIST_SEPARATOR = ","
51
+ PAIR_SEPARATOR = "="
52
+
53
+
54
+ def parse_object(
55
+ type_: Any,
33
56
  text: str,
34
57
  /,
35
58
  *,
59
+ list_separator: str = LIST_SEPARATOR,
60
+ pair_separator: str = PAIR_SEPARATOR,
36
61
  head: bool = False,
37
62
  case_sensitive: bool = False,
38
- extra: ParseTextExtra | None = None,
63
+ extra: ParseObjectExtra | None = None,
39
64
  ) -> Any:
40
65
  """Parse text."""
41
- if obj is None:
66
+ if type_ is None:
42
67
  try:
43
68
  return parse_none(text)
44
69
  except ParseNoneError:
45
- raise _ParseTextParseError(obj=obj, text=text) from None
46
- if isinstance(obj, type):
47
- return _parse_text_type(obj, text, case_sensitive=case_sensitive, extra=extra)
48
- if is_literal_type(obj):
49
- return one_str(get_args(obj), text, head=head, case_sensitive=case_sensitive)
50
- if is_optional_type(obj):
70
+ raise _ParseObjectParseError(type_=type_, text=text) from None
71
+ if isinstance(type_, type):
72
+ return _parse_object_type(
73
+ type_, text, case_sensitive=case_sensitive, extra=extra
74
+ )
75
+ if is_dict_type(type_):
76
+ return _parse_object_dict_type(
77
+ type_,
78
+ text,
79
+ list_separator=list_separator,
80
+ pair_separator=pair_separator,
81
+ head=head,
82
+ case_sensitive=case_sensitive,
83
+ extra=extra,
84
+ )
85
+ if is_frozenset_type(type_):
86
+ return frozenset(
87
+ _parse_object_set_type(
88
+ type_,
89
+ text,
90
+ list_separator=list_separator,
91
+ pair_separator=pair_separator,
92
+ head=head,
93
+ case_sensitive=case_sensitive,
94
+ extra=extra,
95
+ )
96
+ )
97
+ if is_list_type(type_):
98
+ return _parse_object_list_type(
99
+ type_,
100
+ text,
101
+ list_separator=list_separator,
102
+ pair_separator=pair_separator,
103
+ head=head,
104
+ case_sensitive=case_sensitive,
105
+ extra=extra,
106
+ )
107
+ if is_literal_type(type_):
108
+ return one_str(get_args(type_), text, head=head, case_sensitive=case_sensitive)
109
+ if is_optional_type(type_):
51
110
  with suppress(ParseNoneError):
52
111
  return parse_none(text)
53
- inner = one(arg for arg in get_args(obj) if arg is not NoneType)
112
+ inner = one(arg for arg in get_args(type_) if arg is not NoneType)
54
113
  try:
55
- return parse_text(
56
- inner, text, head=head, case_sensitive=case_sensitive, extra=extra
114
+ return parse_object(
115
+ inner,
116
+ text,
117
+ list_separator=list_separator,
118
+ pair_separator=pair_separator,
119
+ head=head,
120
+ case_sensitive=case_sensitive,
121
+ extra=extra,
57
122
  )
58
- except _ParseTextParseError:
59
- raise _ParseTextParseError(obj=obj, text=text) from None
60
- if is_tuple_type(obj):
61
- args = get_args(obj)
62
- try:
63
- texts = extract_group(r"^\((.*)\)$", text, flags=DOTALL).split(", ")
64
- except ExtractGroupError:
65
- raise _ParseTextParseError(obj=obj, text=text) from None
66
- if len(args) != len(texts):
67
- raise _ParseTextParseError(obj=obj, text=text)
68
- return tuple(
69
- parse_text(arg, text, head=head, case_sensitive=case_sensitive, extra=extra)
70
- for arg, text in zip(args, texts, strict=True)
123
+ except _ParseObjectParseError:
124
+ raise _ParseObjectParseError(type_=type_, text=text) from None
125
+ if is_set_type(type_):
126
+ return _parse_object_set_type(
127
+ type_,
128
+ text,
129
+ list_separator=list_separator,
130
+ pair_separator=pair_separator,
131
+ head=head,
132
+ case_sensitive=case_sensitive,
133
+ extra=extra,
134
+ )
135
+ if is_tuple_type(type_):
136
+ return _parse_object_tuple_type(
137
+ type_,
138
+ text,
139
+ list_separator=list_separator,
140
+ pair_separator=pair_separator,
141
+ head=head,
142
+ case_sensitive=case_sensitive,
143
+ extra=extra,
71
144
  )
72
- if is_union_type(obj):
73
- return _parse_text_union_type(obj, text, extra=extra)
74
- raise _ParseTextParseError(obj=obj, text=text) from None
145
+ if is_union_type(type_):
146
+ return _parse_object_union_type(type_, text, extra=extra)
147
+ raise _ParseObjectParseError(type_=type_, text=text) from None
75
148
 
76
149
 
77
- def _parse_text_type(
150
+ def _parse_object_type(
78
151
  cls: type[Any],
79
152
  text: str,
80
153
  /,
81
154
  *,
82
155
  case_sensitive: bool = False,
83
- extra: ParseTextExtra | None = None,
156
+ extra: ParseObjectExtra | None = None,
84
157
  ) -> Any:
85
158
  """Parse text."""
86
159
  if issubclass(cls, NoneType):
87
160
  try:
88
161
  return parse_none(text)
89
162
  except ParseNoneError:
90
- raise _ParseTextParseError(obj=cls, text=text) from None
163
+ raise _ParseObjectParseError(type_=cls, text=text) from None
91
164
  if issubclass(cls, str):
92
165
  return text
93
166
  if issubclass(cls, bool):
94
167
  try:
95
168
  return parse_bool(text)
96
169
  except ParseBoolError:
97
- raise _ParseTextParseError(obj=cls, text=text) from None
170
+ raise _ParseObjectParseError(type_=cls, text=text) from None
98
171
  if is_subclass_int_not_bool(cls):
99
172
  try:
100
173
  return int(text)
101
174
  except ValueError:
102
- raise _ParseTextParseError(obj=cls, text=text) from None
175
+ raise _ParseObjectParseError(type_=cls, text=text) from None
103
176
  if issubclass(cls, float):
104
177
  try:
105
178
  return float(text)
106
179
  except ValueError:
107
- raise _ParseTextParseError(obj=cls, text=text) from None
180
+ raise _ParseObjectParseError(type_=cls, text=text) from None
108
181
  if issubclass(cls, Enum):
109
182
  try:
110
183
  return parse_enum(text, cls, case_sensitive=case_sensitive)
111
184
  except ParseEnumError:
112
- raise _ParseTextParseError(obj=cls, text=text) from None
185
+ raise _ParseObjectParseError(type_=cls, text=text) from None
113
186
  if issubclass(cls, Path):
114
187
  return Path(text).expanduser()
115
188
  if issubclass(cls, Sentinel):
116
189
  try:
117
190
  return parse_sentinel(text)
118
191
  except ParseSentinelError:
119
- raise _ParseTextParseError(obj=cls, text=text) from None
192
+ raise _ParseObjectParseError(type_=cls, text=text) from None
120
193
  if issubclass(cls, Version):
121
194
  try:
122
195
  return parse_version(text)
123
196
  except ParseVersionError:
124
- raise _ParseTextParseError(obj=cls, text=text) from None
197
+ raise _ParseObjectParseError(type_=cls, text=text) from None
125
198
  if is_subclass_date_not_datetime(cls):
126
199
  from utilities.whenever import ParseDateError, parse_date
127
200
 
128
201
  try:
129
202
  return parse_date(text)
130
203
  except ParseDateError:
131
- raise _ParseTextParseError(obj=cls, text=text) from None
204
+ raise _ParseObjectParseError(type_=cls, text=text) from None
132
205
  if issubclass(cls, dt.datetime):
133
206
  from utilities.whenever import ParseDateTimeError, parse_datetime
134
207
 
135
208
  try:
136
209
  return parse_datetime(text)
137
210
  except ParseDateTimeError:
138
- raise _ParseTextParseError(obj=cls, text=text) from None
211
+ raise _ParseObjectParseError(type_=cls, text=text) from None
139
212
  if issubclass(cls, dt.time):
140
213
  from utilities.whenever import ParseTimeError, parse_time
141
214
 
142
215
  try:
143
216
  return parse_time(text)
144
217
  except ParseTimeError:
145
- raise _ParseTextParseError(obj=cls, text=text) from None
218
+ raise _ParseObjectParseError(type_=cls, text=text) from None
146
219
  if issubclass(cls, dt.timedelta):
147
220
  from utilities.whenever import ParseTimedeltaError, parse_timedelta
148
221
 
149
222
  try:
150
223
  return parse_timedelta(text)
151
224
  except ParseTimedeltaError:
152
- raise _ParseTextParseError(obj=cls, text=text) from None
225
+ raise _ParseObjectParseError(type_=cls, text=text) from None
153
226
  if extra is not None:
154
227
  try:
155
228
  parser = one(p for c, p in extra.items() if issubclass(cls, c))
156
229
  except OneEmptyError:
157
230
  pass
158
231
  except OneNonUniqueError as error:
159
- raise _ParseTextExtraNonUniqueError(
160
- obj=cls, text=text, first=error.first, second=error.second
232
+ raise _ParseObjectExtraNonUniqueError(
233
+ type_=cls, text=text, first=error.first, second=error.second
161
234
  ) from None
162
235
  else:
163
236
  return parser(text)
164
- raise _ParseTextParseError(obj=cls, text=text) from None
237
+ raise _ParseObjectParseError(type_=cls, text=text) from None
238
+
239
+
240
+ def _parse_object_dict_type(
241
+ type_: Any,
242
+ text: str,
243
+ /,
244
+ *,
245
+ list_separator: str = LIST_SEPARATOR,
246
+ pair_separator: str = PAIR_SEPARATOR,
247
+ head: bool = False,
248
+ case_sensitive: bool = False,
249
+ extra: ParseObjectExtra | None = None,
250
+ ) -> dict[Any, Any]:
251
+ key_type, value_type = get_args(type_)
252
+ try:
253
+ inner_text = extract_group(r"^{(.*)}$", text, flags=DOTALL)
254
+ except ExtractGroupError:
255
+ raise _ParseObjectParseError(type_=type_, text=text) from None
256
+ pairs = split_key_value_pairs(
257
+ inner_text,
258
+ list_separator=list_separator,
259
+ pair_separator=pair_separator,
260
+ mapping=True,
261
+ )
262
+ keys = (
263
+ parse_object(
264
+ key_type,
265
+ k,
266
+ list_separator=list_separator,
267
+ pair_separator=pair_separator,
268
+ head=head,
269
+ case_sensitive=case_sensitive,
270
+ extra=extra,
271
+ )
272
+ for k in pairs
273
+ )
274
+ values = (
275
+ parse_object(
276
+ value_type,
277
+ v,
278
+ list_separator=list_separator,
279
+ pair_separator=pair_separator,
280
+ head=head,
281
+ case_sensitive=case_sensitive,
282
+ extra=extra,
283
+ )
284
+ for v in pairs.values()
285
+ )
286
+ try:
287
+ return dict(zip(keys, values, strict=True))
288
+ except _ParseObjectParseError:
289
+ raise _ParseObjectParseError(type_=type_, text=text) from None
290
+
291
+
292
+ def _parse_object_list_type(
293
+ type_: Any,
294
+ text: str,
295
+ /,
296
+ *,
297
+ list_separator: str = LIST_SEPARATOR,
298
+ pair_separator: str = PAIR_SEPARATOR,
299
+ head: bool = False,
300
+ case_sensitive: bool = False,
301
+ extra: ParseObjectExtra | None = None,
302
+ ) -> list[Any]:
303
+ inner_type = one(get_args(type_))
304
+ try:
305
+ inner_text = extract_group(r"^\[(.*)\]$", text, flags=DOTALL)
306
+ except ExtractGroupError:
307
+ raise _ParseObjectParseError(type_=type_, text=text) from None
308
+ texts = split_str(inner_text, separator=list_separator)
309
+ try:
310
+ return [
311
+ parse_object(
312
+ inner_type,
313
+ t,
314
+ list_separator=list_separator,
315
+ pair_separator=pair_separator,
316
+ head=head,
317
+ case_sensitive=case_sensitive,
318
+ extra=extra,
319
+ )
320
+ for t in texts
321
+ ]
322
+ except _ParseObjectParseError:
323
+ raise _ParseObjectParseError(type_=type_, text=text) from None
324
+
325
+
326
+ def _parse_object_set_type(
327
+ type_: Any,
328
+ text: str,
329
+ /,
330
+ *,
331
+ list_separator: str = LIST_SEPARATOR,
332
+ pair_separator: str = PAIR_SEPARATOR,
333
+ head: bool = False,
334
+ case_sensitive: bool = False,
335
+ extra: ParseObjectExtra | None = None,
336
+ ) -> set[Any]:
337
+ inner_type = one(get_args(type_))
338
+ try:
339
+ inner_text = extract_group(r"^{(.*)}$", text, flags=DOTALL)
340
+ except ExtractGroupError:
341
+ raise _ParseObjectParseError(type_=type_, text=text) from None
342
+ texts = split_str(inner_text, separator=list_separator)
343
+ try:
344
+ return {
345
+ parse_object(
346
+ inner_type,
347
+ t,
348
+ list_separator=list_separator,
349
+ pair_separator=pair_separator,
350
+ head=head,
351
+ case_sensitive=case_sensitive,
352
+ extra=extra,
353
+ )
354
+ for t in texts
355
+ }
356
+ except _ParseObjectParseError:
357
+ raise _ParseObjectParseError(type_=type_, text=text) from None
165
358
 
166
359
 
167
- def _parse_text_union_type(
168
- obj: Any, text: str, /, *, extra: ParseTextExtra | None = None
360
+ def _parse_object_union_type(
361
+ type_: Any, text: str, /, *, extra: ParseObjectExtra | None = None
169
362
  ) -> Any:
170
- if obj is Number:
363
+ if type_ is Number:
171
364
  try:
172
365
  return parse_number(text)
173
366
  except ParseNumberError:
174
- raise _ParseTextParseError(obj=obj, text=text) from None
175
- if obj is Duration:
367
+ raise _ParseObjectParseError(type_=type_, text=text) from None
368
+ if type_ is Duration:
176
369
  from utilities.whenever import ParseDurationError, parse_duration
177
370
 
178
371
  try:
179
372
  return parse_duration(text)
180
373
  except ParseDurationError:
181
- raise _ParseTextParseError(obj=obj, text=text) from None
374
+ raise _ParseObjectParseError(type_=type_, text=text) from None
182
375
  if extra is not None:
183
376
  try:
184
- parser = one(p for c, p in extra.items() if c is obj)
377
+ parser = one(p for c, p in extra.items() if c is type_)
185
378
  except OneEmptyError:
186
379
  pass
187
380
  else:
188
381
  return parser(text)
189
- raise _ParseTextParseError(obj=obj, text=text) from None
382
+ raise _ParseObjectParseError(type_=type_, text=text) from None
383
+
384
+
385
+ def _parse_object_tuple_type(
386
+ type_: Any,
387
+ text: str,
388
+ /,
389
+ *,
390
+ list_separator: str = LIST_SEPARATOR,
391
+ pair_separator: str = PAIR_SEPARATOR,
392
+ head: bool = False,
393
+ case_sensitive: bool = False,
394
+ extra: ParseObjectExtra | None = None,
395
+ ) -> tuple[Any, ...]:
396
+ args = get_args(type_)
397
+ try:
398
+ inner = extract_group(r"^\((.*)\)$", text, flags=DOTALL)
399
+ except ExtractGroupError:
400
+ raise _ParseObjectParseError(type_=type_, text=text) from None
401
+ texts = inner.split(list_separator)
402
+ if len(args) != len(texts):
403
+ raise _ParseObjectParseError(type_=type_, text=text)
404
+ try:
405
+ return tuple(
406
+ parse_object(
407
+ arg,
408
+ text,
409
+ list_separator=list_separator,
410
+ pair_separator=pair_separator,
411
+ head=head,
412
+ case_sensitive=case_sensitive,
413
+ extra=extra,
414
+ )
415
+ for arg, text in zip(args, texts, strict=True)
416
+ )
417
+ except _ParseObjectParseError:
418
+ raise _ParseObjectParseError(type_=type_, text=text) from None
190
419
 
191
420
 
192
421
  @dataclass
193
- class ParseTextError(Exception):
194
- obj: Any
422
+ class ParseObjectError(Exception):
423
+ type_: Any
195
424
  text: str
196
425
 
197
426
 
198
427
  @dataclass
199
- class _ParseTextParseError(ParseTextError):
428
+ class _ParseObjectParseError(ParseObjectError):
200
429
  @override
201
430
  def __str__(self) -> str:
202
- return f"Unable to parse {self.obj!r}; got {self.text!r}"
431
+ return f"Unable to parse {self.type_!r}; got {self.text!r}"
203
432
 
204
433
 
205
434
  @dataclass
206
- class _ParseTextExtraNonUniqueError(ParseTextError):
435
+ class _ParseObjectExtraNonUniqueError(ParseObjectError):
207
436
  first: type[Any]
208
437
  second: type[Any]
209
438
 
210
439
  @override
211
440
  def __str__(self) -> str:
212
- return f"Unable to parse {self.obj!r} since `extra` must contain exactly one parent class; got {self.first!r}, {self.second!r} and perhaps more"
441
+ return f"Unable to parse {self.type_!r} since `extra` must contain exactly one parent class; got {self.first!r}, {self.second!r} and perhaps more"
442
+
443
+
444
+ ##
445
+
446
+
447
+ def serialize_object(
448
+ obj: Any,
449
+ /,
450
+ *,
451
+ list_separator: str = LIST_SEPARATOR,
452
+ pair_separator: str = PAIR_SEPARATOR,
453
+ ) -> str:
454
+ """Convert an object to text."""
455
+ if (obj is None) or isinstance(
456
+ obj, bool | int | float | str | Path | Sentinel | Version
457
+ ):
458
+ return str(obj)
459
+ if is_instance_date_not_datetime(obj):
460
+ from utilities.whenever import serialize_date
461
+
462
+ return serialize_date(obj)
463
+ if isinstance(obj, dt.datetime):
464
+ from utilities.whenever import serialize_datetime
465
+
466
+ return serialize_datetime(obj)
467
+ if isinstance(obj, dt.time):
468
+ from utilities.whenever import serialize_time
469
+
470
+ return serialize_time(obj)
471
+ if isinstance(obj, dt.timedelta):
472
+ from utilities.whenever import serialize_timedelta
473
+
474
+ return serialize_timedelta(obj)
475
+ if isinstance(obj, Enum):
476
+ return obj.name
477
+ if isinstance(obj, dict):
478
+ return _serialize_object_dict(
479
+ obj, list_separator=list_separator, pair_separator=pair_separator
480
+ )
481
+ if isinstance(obj, list):
482
+ return _serialize_object_list(
483
+ obj, list_separator=list_separator, pair_separator=pair_separator
484
+ )
485
+ if isinstance(obj, tuple):
486
+ return _serialize_object_tuple(
487
+ obj, list_separator=list_separator, pair_separator=pair_separator
488
+ )
489
+ if isinstance(obj, set | frozenset):
490
+ return _serialize_object_set(
491
+ obj, list_separator=list_separator, pair_separator=pair_separator
492
+ )
493
+ raise NotImplementedError(obj)
494
+
495
+
496
+ def _serialize_object_dict(
497
+ obj: Mapping[Any, Any],
498
+ /,
499
+ *,
500
+ list_separator: str = LIST_SEPARATOR,
501
+ pair_separator: str = PAIR_SEPARATOR,
502
+ ) -> str:
503
+ keys = (
504
+ serialize_object(
505
+ k, list_separator=list_separator, pair_separator=pair_separator
506
+ )
507
+ for k in obj
508
+ )
509
+ values = (
510
+ serialize_object(
511
+ v, list_separator=list_separator, pair_separator=pair_separator
512
+ )
513
+ for v in obj.values()
514
+ )
515
+ items = zip(keys, values, strict=True)
516
+ joined_items = (join_strs(item, separator=pair_separator) for item in items)
517
+ joined = join_strs(joined_items, separator=list_separator)
518
+ return f"{{{joined}}}"
519
+
520
+
521
+ def _serialize_object_list(
522
+ obj: Sequence[Any],
523
+ /,
524
+ *,
525
+ list_separator: str = LIST_SEPARATOR,
526
+ pair_separator: str = PAIR_SEPARATOR,
527
+ ) -> str:
528
+ items = (
529
+ serialize_object(
530
+ i, list_separator=list_separator, pair_separator=pair_separator
531
+ )
532
+ for i in obj
533
+ )
534
+ joined = join_strs(items, separator=list_separator)
535
+ return f"[{joined}]"
536
+
537
+
538
+ def _serialize_object_set(
539
+ obj: AbstractSet[Any],
540
+ /,
541
+ *,
542
+ list_separator: str = LIST_SEPARATOR,
543
+ pair_separator: str = PAIR_SEPARATOR,
544
+ ) -> str:
545
+ items = (
546
+ serialize_object(
547
+ i, list_separator=list_separator, pair_separator=pair_separator
548
+ )
549
+ for i in obj
550
+ )
551
+ joined = join_strs(items, sort=True, separator=list_separator)
552
+ return f"{{{joined}}}"
553
+
554
+
555
+ def _serialize_object_tuple(
556
+ obj: tuple[Any, ...],
557
+ /,
558
+ *,
559
+ list_separator: str = LIST_SEPARATOR,
560
+ pair_separator: str = PAIR_SEPARATOR,
561
+ ) -> str:
562
+ items = (
563
+ serialize_object(
564
+ i, list_separator=list_separator, pair_separator=pair_separator
565
+ )
566
+ for i in obj
567
+ )
568
+ joined = join_strs(items, separator=list_separator)
569
+ return f"({joined})"
570
+
571
+
572
+ __all__ = ["LIST_SEPARATOR", "PAIR_SEPARATOR", "parse_object", "serialize_object"]
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, override
6
6
 
7
7
  from dotenv import dotenv_values
8
8
 
9
- from utilities.dataclasses import MappingToDataclassError, text_to_dataclass
9
+ from utilities.dataclasses import MappingToDataclassError, parse_dataclass
10
10
  from utilities.git import get_repo_root
11
11
  from utilities.iterables import MergeStrMappingsError, merge_str_mappings
12
12
  from utilities.pathlib import PWD
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
17
17
  from collections.abc import Set as AbstractSet
18
18
  from pathlib import Path
19
19
 
20
- from utilities.types import ParseTextExtra, PathLike, StrMapping, TDataclass
20
+ from utilities.types import ParseObjectExtra, PathLike, StrMapping, TDataclass
21
21
 
22
22
 
23
23
  def load_settings(
@@ -30,7 +30,7 @@ def load_settings(
30
30
  warn_name_errors: bool = False,
31
31
  head: bool = False,
32
32
  case_sensitive: bool = False,
33
- extra_parsers: ParseTextExtra | None = None,
33
+ extra_parsers: ParseObjectExtra | None = None,
34
34
  ) -> TDataclass:
35
35
  """Load a set of settings from the `.env` file."""
36
36
  path = get_repo_root(cwd=cwd).joinpath(".env")
@@ -50,7 +50,7 @@ def load_settings(
50
50
  ) from None
51
51
  values = {k: v for k, v in maybe_values.items() if v is not None}
52
52
  try:
53
- return text_to_dataclass(
53
+ return parse_dataclass(
54
54
  values,
55
55
  cls,
56
56
  globalns=globalns,
utilities/types.py CHANGED
@@ -231,7 +231,7 @@ class SupportsRound(Protocol[_T_co]):
231
231
 
232
232
 
233
233
  # parse
234
- type ParseTextExtra = Mapping[Any, Callable[[str], Any]]
234
+ type ParseObjectExtra = Mapping[Any, Callable[[str], Any]]
235
235
 
236
236
 
237
237
  # pathlib
@@ -283,7 +283,7 @@ __all__ = [
283
283
  "OpenMode",
284
284
  "OptExcInfo",
285
285
  "Parallelism",
286
- "ParseTextExtra",
286
+ "ParseObjectExtra",
287
287
  "PathLike",
288
288
  "PathLikeOrCallable",
289
289
  "RoundMode",