dycw-utilities 0.146.2__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dycw-utilities might be problematic.

Files changed (89)
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. {dycw_utilities-0.146.2.dist-info → dycw_utilities-0.178.1.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +10 -7
  7. utilities/asyncio.py +129 -50
  8. utilities/atomicwrites.py +1 -1
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +144 -49
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +4 -2
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +8 -3
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +20 -297
  23. utilities/git.py +19 -0
  24. utilities/grp.py +28 -0
  25. utilities/hypothesis.py +361 -79
  26. utilities/importlib.py +17 -1
  27. utilities/inflect.py +1 -1
  28. utilities/iterables.py +33 -58
  29. utilities/jinja2.py +148 -0
  30. utilities/json.py +1 -1
  31. utilities/libcst.py +7 -7
  32. utilities/logging.py +131 -93
  33. utilities/math.py +8 -4
  34. utilities/more_itertools.py +4 -6
  35. utilities/operator.py +1 -1
  36. utilities/orjson.py +86 -34
  37. utilities/os.py +49 -2
  38. utilities/packaging.py +115 -0
  39. utilities/parse.py +2 -2
  40. utilities/pathlib.py +66 -34
  41. utilities/permissions.py +298 -0
  42. utilities/platform.py +5 -4
  43. utilities/polars.py +934 -420
  44. utilities/polars_ols.py +1 -1
  45. utilities/postgres.py +317 -153
  46. utilities/pottery.py +10 -86
  47. utilities/pqdm.py +3 -3
  48. utilities/pwd.py +28 -0
  49. utilities/pydantic.py +4 -51
  50. utilities/pydantic_settings.py +240 -0
  51. utilities/pydantic_settings_sops.py +76 -0
  52. utilities/pyinstrument.py +5 -5
  53. utilities/pytest.py +100 -126
  54. utilities/pytest_plugins/pytest_randomly.py +1 -1
  55. utilities/pytest_plugins/pytest_regressions.py +7 -3
  56. utilities/pytest_regressions.py +27 -8
  57. utilities/random.py +11 -6
  58. utilities/re.py +1 -1
  59. utilities/redis.py +101 -64
  60. utilities/sentinel.py +10 -0
  61. utilities/shelve.py +4 -1
  62. utilities/shutil.py +25 -0
  63. utilities/slack_sdk.py +9 -4
  64. utilities/sqlalchemy.py +422 -352
  65. utilities/sqlalchemy_polars.py +28 -52
  66. utilities/string.py +1 -1
  67. utilities/subprocess.py +1977 -0
  68. utilities/tempfile.py +112 -4
  69. utilities/testbook.py +50 -0
  70. utilities/text.py +174 -42
  71. utilities/throttle.py +158 -0
  72. utilities/timer.py +2 -2
  73. utilities/traceback.py +59 -38
  74. utilities/types.py +68 -22
  75. utilities/typing.py +479 -19
  76. utilities/uuid.py +42 -5
  77. utilities/version.py +27 -26
  78. utilities/whenever.py +663 -178
  79. utilities/zoneinfo.py +80 -22
  80. dycw_utilities-0.146.2.dist-info/METADATA +0 -41
  81. dycw_utilities-0.146.2.dist-info/RECORD +0 -99
  82. dycw_utilities-0.146.2.dist-info/WHEEL +0 -4
  83. dycw_utilities-0.146.2.dist-info/licenses/LICENSE +0 -21
  84. utilities/aiolimiter.py +0 -25
  85. utilities/eventkit.py +0 -388
  86. utilities/period.py +0 -237
  87. utilities/python_dotenv.py +0 -101
  88. utilities/streamlit.py +0 -105
  89. utilities/typed_settings.py +0 -144
utilities/orjson.py CHANGED
@@ -5,7 +5,7 @@ import re
 from collections.abc import Callable, Iterable, Mapping, Sequence
 from contextlib import suppress
 from dataclasses import dataclass, field, replace
-from enum import Enum, unique
+from enum import Enum, StrEnum, unique
 from functools import cached_property, partial
 from itertools import chain
 from logging import Formatter, LogRecord
@@ -20,6 +20,7 @@ from orjson import (
     OPT_PASSTHROUGH_DATACLASS,
     OPT_PASSTHROUGH_DATETIME,
     OPT_SORT_KEYS,
+    JSONDecodeError,
     dumps,
     loads,
 )
@@ -37,7 +38,7 @@ from whenever import (
 
 from utilities.concurrent import concurrent_map
 from utilities.dataclasses import dataclass_to_dict
-from utilities.functions import ensure_class, is_string_mapping
+from utilities.functions import ensure_class
 from utilities.gzip import read_binary
 from utilities.iterables import (
     OneEmptyError,
@@ -50,9 +51,15 @@ from utilities.json import write_formatted_json
 from utilities.logging import get_logging_level_number
 from utilities.math import MAX_INT64, MIN_INT64
 from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
+from utilities.typing import is_string_mapping
 from utilities.tzlocal import LOCAL_TIME_ZONE
 from utilities.version import Version, parse_version
-from utilities.whenever import from_timestamp
+from utilities.whenever import (
+    DatePeriod,
+    TimePeriod,
+    ZonedDateTimePeriod,
+    from_timestamp,
+)
 
 if TYPE_CHECKING:
     from collections.abc import Set as AbstractSet
@@ -65,10 +72,11 @@ if TYPE_CHECKING:
 
 
 @unique
-class _Prefixes(Enum):
+class _Prefixes(StrEnum):
     dataclass = "dc"
     date = "d"
     date_delta = "dd"
+    date_period = "dp"
     date_time_delta = "D"
     enum = "e"
     exception_class = "Ex"
@@ -77,7 +85,7 @@ class _Prefixes(Enum):
     frozenset_ = "fr"
     list_ = "l"
     month_day = "md"
-    none = "none"
+    none = "0"
     path = "p"
     plain_date_time = "pd"
     py_date = "!d"
@@ -87,12 +95,14 @@ class _Prefixes(Enum):
     set_ = "s"
     time = "ti"
     time_delta = "td"
+    time_period = "tp"
     tuple_ = "tu"
     unserializable = "un"
     uuid = "uu"
     version = "v"
     year_month = "ym"
     zoned_date_time = "zd"
+    zoned_date_time_period = "zp"
 
 
 type _DataclassHook = Callable[[type[Dataclass], StrMapping], StrMapping]
@@ -167,8 +177,10 @@ def _pre_process(
             return f"[{_Prefixes.date.value}]{date}"
         case DateDelta() as date:
             return f"[{_Prefixes.date_delta.value}]{date}"
-        case DateTimeDelta() as date:
-            return f"[{_Prefixes.date_time_delta.value}]{date}"
+        case DatePeriod() as period:
+            return f"[{_Prefixes.date_period.value}]{period.start},{period.end}"
+        case DateTimeDelta() as date_time_delta:
+            return f"[{_Prefixes.date_time_delta.value}]{date_time_delta}"
         case Exception() as error_:
             return {
                 f"[{_Prefixes.exception_instance.value}|{type(error_).__qualname__}]": pre(
@@ -187,14 +199,16 @@
             return f"[{_Prefixes.month_day.value}]{month_day!s}"
         case Path() as path:
            return f"[{_Prefixes.path.value}]{path!s}"
-        case PlainDateTime() as datetime:
-            return f"[{_Prefixes.plain_date_time.value}]{datetime}"
-        case str() as str_:
-            return str_
+        case PlainDateTime() as date_time:
+            return f"[{_Prefixes.plain_date_time.value}]{date_time}"
+        case str() as text:
+            return text
         case Time() as time:
             return f"[{_Prefixes.time.value}]{time}"
         case TimeDelta() as time_delta:
             return f"[{_Prefixes.time_delta.value}]{time_delta}"
+        case TimePeriod() as period:
+            return f"[{_Prefixes.time_period.value}]{period.start},{period.end}"
         case type() as error_cls if issubclass(error_cls, Exception):
            return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
        case UUID() as uuid:
@@ -203,8 +217,10 @@
             return f"[{_Prefixes.version.value}]{version}"
         case YearMonth() as year_month:
             return f"[{_Prefixes.year_month.value}]{year_month}"
-        case ZonedDateTime() as datetime:
-            return f"[{_Prefixes.zoned_date_time.value}]{datetime}"
+        case ZonedDateTime() as date_time:
+            return f"[{_Prefixes.zoned_date_time.value}]{date_time}"
+        case ZonedDateTimePeriod() as period:
+            return f"[{_Prefixes.zoned_date_time_period.value}]{period.start.to_plain()},{period.end}"
         case dt.datetime() as py_datetime:
             match py_datetime.tzinfo:
                 case None:
@@ -357,8 +373,12 @@ def deserialize(
     redirects: Mapping[str, type[Any]] | None = None,
 ) -> Any:
     """Deserialize an object."""
+    try:
+        obj = loads(data)
+    except JSONDecodeError:
+        raise _DeserializeInvalidJSONError(data=data) from None
     return _object_hook(
-        loads(data),
+        obj,
         data=data,
         dataclass_hook=dataclass_hook,
         objects=objects,
@@ -366,9 +386,15 @@
     )
 
 
+@dataclass(kw_only=True, slots=True)
+class DeerializeError(Exception):
+    obj: Any
+
+
 (
     _DATE_PATTERN,
     _DATE_DELTA_PATTERN,
+    _DATE_PERIOD_PATTERN,
     _DATE_TIME_DELTA_PATTERN,
     _FLOAT_PATTERN,
     _MONTH_DAY_PATTERN,
@@ -381,15 +407,18 @@
     _PY_ZONED_DATE_TIME_PATTERN,
     _TIME_PATTERN,
     _TIME_DELTA_PATTERN,
+    _TIME_PERIOD_PATTERN,
     _UUID_PATTERN,
     _VERSION_PATTERN,
     _YEAR_MONTH_PATTERN,
     _ZONED_DATE_TIME_PATTERN,
+    _ZONED_DATE_TIME_PERIOD_PATTERN,
 ) = [
     re.compile(r"^\[" + p.value + r"\](" + ".*" + ")$")
     for p in [
         _Prefixes.date,
         _Prefixes.date_delta,
+        _Prefixes.date_period,
         _Prefixes.date_time_delta,
         _Prefixes.float_,
         _Prefixes.month_day,
@@ -402,10 +431,12 @@
         _Prefixes.py_zoned_date_time,
         _Prefixes.time,
         _Prefixes.time_delta,
+        _Prefixes.time_period,
         _Prefixes.uuid,
         _Prefixes.version,
         _Prefixes.year_month,
         _Prefixes.zoned_date_time,
+        _Prefixes.zoned_date_time_period,
     ]
 ]
 
@@ -450,39 +481,50 @@ def _object_hook(
     if match := _NONE_PATTERN.search(text):
         return None
     if match := _DATE_PATTERN.search(text):
-        return Date.parse_common_iso(match.group(1))
+        return Date.parse_iso(match.group(1))
     if match := _DATE_DELTA_PATTERN.search(text):
-        return DateDelta.parse_common_iso(match.group(1))
+        return DateDelta.parse_iso(match.group(1))
+    if match := _DATE_PERIOD_PATTERN.search(text):
+        start, end = map(Date.parse_iso, match.group(1).split(","))
+        return DatePeriod(start, end)
     if match := _DATE_TIME_DELTA_PATTERN.search(text):
-        return DateTimeDelta.parse_common_iso(match.group(1))
+        return DateTimeDelta.parse_iso(match.group(1))
     if match := _FLOAT_PATTERN.search(text):
         return float(match.group(1))
     if match := _MONTH_DAY_PATTERN.search(text):
-        return MonthDay.parse_common_iso(match.group(1))
+        return MonthDay.parse_iso(match.group(1))
     if match := _PATH_PATTERN.search(text):
         return Path(match.group(1))
     if match := _PLAIN_DATE_TIME_PATTERN.search(text):
-        return PlainDateTime.parse_common_iso(match.group(1))
+        return PlainDateTime.parse_iso(match.group(1))
     if match := _PY_DATE_PATTERN.search(text):
-        return Date.parse_common_iso(match.group(1)).py_date()
+        return Date.parse_iso(match.group(1)).py_date()
     if match := _PY_PLAIN_DATE_TIME_PATTERN.search(text):
-        return PlainDateTime.parse_common_iso(match.group(1)).py_datetime()
+        return PlainDateTime.parse_iso(match.group(1)).py_datetime()
     if match := _PY_TIME_PATTERN.search(text):
-        return Time.parse_common_iso(match.group(1)).py_time()
+        return Time.parse_iso(match.group(1)).py_time()
     if match := _PY_ZONED_DATE_TIME_PATTERN.search(text):
-        return ZonedDateTime.parse_common_iso(match.group(1)).py_datetime()
+        return ZonedDateTime.parse_iso(match.group(1)).py_datetime()
     if match := _TIME_PATTERN.search(text):
-        return Time.parse_common_iso(match.group(1))
+        return Time.parse_iso(match.group(1))
     if match := _TIME_DELTA_PATTERN.search(text):
-        return TimeDelta.parse_common_iso(match.group(1))
+        return TimeDelta.parse_iso(match.group(1))
+    if match := _TIME_PERIOD_PATTERN.search(text):
+        start, end = map(Time.parse_iso, match.group(1).split(","))
+        return TimePeriod(start, end)
     if match := _UUID_PATTERN.search(text):
         return UUID(match.group(1))
     if match := _VERSION_PATTERN.search(text):
         return parse_version(match.group(1))
     if match := _YEAR_MONTH_PATTERN.search(text):
-        return YearMonth.parse_common_iso(match.group(1))
+        return YearMonth.parse_iso(match.group(1))
     if match := _ZONED_DATE_TIME_PATTERN.search(text):
-        return ZonedDateTime.parse_common_iso(match.group(1))
+        return ZonedDateTime.parse_iso(match.group(1))
+    if match := _ZONED_DATE_TIME_PERIOD_PATTERN.search(text):
+        start, end = match.group(1).split(",")
+        end = ZonedDateTime.parse_iso(end)
+        start = PlainDateTime.parse_iso(start).assume_tz(end.tz)
+        return ZonedDateTimePeriod(start, end)
     if (
         exc_class := _object_hook_exception_class(
             text, data=data, objects=objects, redirects=redirects
@@ -564,7 +606,7 @@ def _object_hook(
                 )
                 for k, v in mapping.items()
             }
-        case _ as never:
+        case never:
             assert_never(never)
 
 
@@ -708,11 +750,19 @@ def _object_hook_get_object(
 @dataclass(kw_only=True, slots=True)
 class DeserializeError(Exception):
     data: bytes
-    qualname: str
+
+
+@dataclass(kw_only=True, slots=True)
+class _DeserializeInvalidJSONError(DeserializeError):
+    @override
+    def __str__(self) -> str:
+        return f"Invalid JSON: {self.data!r}"
 
 
 @dataclass(kw_only=True, slots=True)
 class _DeserializeNoObjectsError(DeserializeError):
+    qualname: str
+
     @override
     def __str__(self) -> str:
         return f"Objects required to deserialize {self.qualname!r} from {self.data!r}"
@@ -720,6 +770,8 @@ class _DeserializeNoObjectsError(DeserializeError):
 
 @dataclass(kw_only=True, slots=True)
 class _DeserializeObjectNotFoundError(DeserializeError):
+    qualname: str
+
     @override
     def __str__(self) -> str:
         return (
@@ -1006,7 +1058,7 @@ class GetLogRecordsOutput:
                         for r in records
                         if (r.func_name is not None) and search(func_name, r.func_name)
                     ]
-                case _ as never:
+                case never:
                     assert_never(never)
         if extra is not None:
            match extra:
@@ -1019,7 +1071,7 @@ class GetLogRecordsOutput:
                         if (r.extra is not None)
                         and set(r.extra).issuperset(always_iterable(keys))
                     ]
-                case _ as never:
+                case never:
                     assert_never(never)
         if log_file is not None:
            match log_file:
@@ -1034,7 +1086,7 @@ class GetLogRecordsOutput:
                         if (r.log_file is not None)
                         and search(str(log_file), str(r.log_file))
                     ]
-                case _ as never:
+                case never:
                     assert_never(never)
         if log_file_line_num is not None:
            match log_file_line_num:
@@ -1048,7 +1100,7 @@ class GetLogRecordsOutput:
                     records = [
                         r for r in records if r.log_file_line_num == log_file_line_num
                     ]
-                case _ as never:
+                case never:
                     assert_never(never)
         if min_log_file_line_num is not None:
            records = [
@@ -1126,7 +1178,7 @@ def _get_log_records_one(
     path = Path(path)
     try:
         lines = path.read_text().splitlines()
-    except UnicodeDecodeError as error:  # skipif-ci-and-windows
+    except UnicodeDecodeError as error:
         return _GetLogRecordsOneOutput(path=path, file_ok=False, other_errors=[error])
     num_lines_blank, num_lines_error = 0, 0
     missing: set[str] = set()
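
For orientation, a minimal round-trip sketch of the new period support, under two assumptions: `serialize` is the existing counterpart of the `deserialize` function shown above (it does not appear in these hunks), and the `DatePeriod` constructor takes `(start, end)` as inferred from the `_object_hook` changes.

    from whenever import Date

    from utilities.orjson import deserialize, serialize
    from utilities.whenever import DatePeriod

    period = DatePeriod(Date(2024, 1, 1), Date(2024, 1, 31))
    data = serialize(period)  # roughly b'"[dp]2024-01-01,2024-01-31"', per _Prefixes.date_period
    assert deserialize(data) == period  # matched back via _DATE_PERIOD_PATTERN

    deserialize(b"not json")  # now raises a DeserializeError subclass rather than a bare JSONDecodeError
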
utilities/os.py CHANGED
@@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Literal, assert_never, overload, override
 
 from utilities.contextlib import enhanced_context_manager
 from utilities.iterables import OneStrEmptyError, one_str
+from utilities.platform import SYSTEM
 
 if TYPE_CHECKING:
     from collections.abc import Iterator, Mapping
@@ -48,7 +49,7 @@ def get_cpu_use(*, n: IntOrAll = "all") -> int:
             raise GetCPUUseError(n=n)
         case "all":
             return CPU_COUNT
-        case _ as never:
+        case never:
             assert_never(never)
 
 
@@ -105,7 +106,7 @@ def get_env_var(
             return None
         case str(), _:
             return default
-        case _ as never:
+        case never:
             assert_never(never)
     return environ[key_use]
 
@@ -124,6 +125,39 @@ class GetEnvVarError(Exception):
 ##
 
 
+def get_effective_group_id() -> int | None:
+    """Get the effective group ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from os import getegid
+
+            return getegid()
+        case never:
+            assert_never(never)
+
+
+def get_effective_user_id() -> int | None:
+    """Get the effective user ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from os import geteuid
+
+            return geteuid()
+        case never:
+            assert_never(never)
+
+
+EFFECTIVE_USER_ID = get_effective_user_id()
+EFFECTIVE_GROUP_ID = get_effective_group_id()
+
+
+##
+
+
 def is_debug() -> bool:
     """Check if we are in `DEBUG` mode."""
     return get_env_var("DEBUG", nullable=True) is not None
@@ -132,6 +166,14 @@ def is_debug() -> bool:
 ##
 
 
+def is_pytest() -> bool:
+    """Check if `pytest` is running."""
+    return get_env_var("PYTEST_VERSION", nullable=True) is not None
+
+
+##
+
+
 @enhanced_context_manager
 def temp_environ(
     env: Mapping[str, str | None] | None = None, **env_kwargs: str | None
@@ -157,12 +199,17 @@ def temp_environ(
 
 __all__ = [
     "CPU_COUNT",
+    "EFFECTIVE_GROUP_ID",
+    "EFFECTIVE_USER_ID",
     "GetCPUCountError",
     "GetCPUUseError",
     "IntOrAll",
     "get_cpu_count",
     "get_cpu_use",
+    "get_effective_group_id",
+    "get_effective_user_id",
     "get_env_var",
     "is_debug",
+    "is_pytest",
     "temp_environ",
 ]
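
A short sketch of the helpers added above; per the hunks, the effective IDs are None on Windows and `is_pytest` keys off the `PYTEST_VERSION` environment variable that pytest sets.

    from utilities.os import EFFECTIVE_GROUP_ID, EFFECTIVE_USER_ID, is_pytest

    if not is_pytest():  # PYTEST_VERSION is only present while pytest is running
        # e.g. 1000 / 1000 for a typical Linux user, None / None on Windows
        print(EFFECTIVE_USER_ID, EFFECTIVE_GROUP_ID)
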
utilities/packaging.py ADDED
@@ -0,0 +1,115 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Self, overload, override
+
+import packaging._parser
+import packaging.requirements
+from packaging.requirements import _parse_requirement
+from packaging.specifiers import Specifier, SpecifierSet
+
+from utilities.iterables import OneEmptyError, one
+
+if TYPE_CHECKING:
+    from packaging._parser import MarkerList
+
+
+@dataclass(order=True, unsafe_hash=True, slots=True)
+class Requirement:
+    requirement: str
+    _parsed_req: packaging._parser.ParsedRequirement = field(init=False, repr=False)
+    _custom_req: _CustomRequirement = field(init=False, repr=False)
+
+    def __getitem__(self, operator: str, /) -> str:
+        return self.specifier_set[operator]
+
+    def __post_init__(self) -> None:
+        self._parsed_req = _parse_requirement(self.requirement)
+        self._custom_req = _CustomRequirement(self.requirement)
+
+    @override
+    def __str__(self) -> str:
+        return str(self._custom_req)
+
+    @property
+    def extras(self) -> list[str]:
+        return self._parsed_req.extras
+
+    @overload
+    def get(self, operator: str, default: str, /) -> str: ...
+    @overload
+    def get(self, operator: str, default: None = None, /) -> str | None: ...
+    def get(self, operator: str, default: str | None = None, /) -> str | None:
+        return self.specifier_set.get(operator, default)
+
+    @property
+    def marker(self) -> MarkerList | None:
+        return self._parsed_req.marker
+
+    @property
+    def name(self) -> str:
+        return self._parsed_req.name
+
+    def replace(self, operator: str, version: str, /) -> Self:
+        return type(self)(str(self._custom_req.replace(operator, version)))
+
+    @property
+    def specifier(self) -> str:
+        return self._parsed_req.specifier
+
+    @property
+    def specifier_set(self) -> _CustomSpecifierSet:
+        return _CustomSpecifierSet(_parse_requirement(self.requirement).specifier)
+
+    @property
+    def url(self) -> str:
+        return self._parsed_req.url
+
+
+class _CustomRequirement(packaging.requirements.Requirement):
+    specifier: _CustomSpecifierSet
+
+    @override
+    def __init__(self, requirement_string: str) -> None:
+        super().__init__(requirement_string)
+        parsed = _parse_requirement(requirement_string)
+        self.specifier = _CustomSpecifierSet(parsed.specifier)  # pyright: ignore[reportIncompatibleVariableOverride]
+
+    def replace(self, operator: str, version: str, /) -> Self:
+        new = type(self)(super().__str__())
+        new.specifier = self.specifier.replace(operator, version)
+        return new
+
+
+class _CustomSpecifierSet(SpecifierSet):
+    def __getitem__(self, operator: str, /) -> str:
+        try:
+            return one(s.version for s in self if s.operator == operator)
+        except OneEmptyError:
+            raise KeyError(operator) from None
+
+    @override
+    def __str__(self) -> str:
+        specs = sorted(self._specs, key=self._sort_key)
+        return ", ".join(map(str, specs))
+
+    @overload
+    def get(self, operator: str, default: str, /) -> str: ...
+    @overload
+    def get(self, operator: str, default: None = None, /) -> str | None: ...
+    def get(self, operator: str, default: str | None = None, /) -> str | None:
+        try:
+            return self[operator]
+        except KeyError:
+            return default
+
+    def replace(self, operator: str, version: str, /) -> Self:
+        new = Specifier(spec=f"{operator}{version}")
+        remainder = (s for s in self if s.operator != operator)
+        return type(self)([new, *remainder])
+
+    def _sort_key(self, spec: Specifier, /) -> int:
+        return [">=", "<"].index(spec.operator)
+
+
+__all__ = ["Requirement"]
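
For context, a hedged usage sketch of the new `Requirement` wrapper, based only on the code above; the requirement string is illustrative.

    from utilities.packaging import Requirement

    req = Requirement("polars[pyarrow]>=1.0, <2.0")
    assert req.name == "polars"
    assert req.extras == ["pyarrow"]
    assert req[">="] == "1.0"      # operator lookup via the custom specifier set
    assert req.get("==") is None   # absent operators fall back to the default

`replace(operator, version)` is intended to return a new Requirement with that operator's pin swapped, e.g. `req.replace(">=", "1.5")`.
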
utilities/parse.py CHANGED
@@ -204,7 +204,7 @@ def _parse_object_type(
         ),
     ):
         try:
-            return cls.parse_common_iso(text)
+            return cls.parse_iso(text)
         except ValueError:
             raise _ParseObjectParseError(type_=cls, text=text) from None
     if issubclass(cls, Path):
@@ -477,7 +477,7 @@ def serialize_object(
             ZonedDateTime,
         ),
     ):
-        return obj.format_common_iso()
+        return obj.format_iso()
     if isinstance(obj, Enum):
         return obj.name
     if isinstance(obj, dict):
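
Both hunks track the rename of the `*_common_iso` parsing and formatting methods in the whenever library; a quick sketch of the new spellings, assuming a whenever release that exposes them.

    from whenever import Date

    date = Date.parse_iso("2024-01-31")       # previously Date.parse_common_iso(...)
    assert date.format_iso() == "2024-01-31"  # previously date.format_common_iso()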