spitch 1.34.0__py3-none-any.whl → 1.36.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of spitch might be problematic.

spitch/_models.py CHANGED
@@ -49,7 +49,7 @@ from ._utils import (
     strip_annotated_type,
 )
 from ._compat import (
-    PYDANTIC_V2,
+    PYDANTIC_V1,
     ConfigDict,
     GenericModel as BaseGenericModel,
     get_args,
@@ -80,11 +80,7 @@ class _ConfigProtocol(Protocol):


 class BaseModel(pydantic.BaseModel):
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(
-            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
-        )
-    else:
+    if PYDANTIC_V1:

         @property
         @override
@@ -94,6 +90,10 @@ class BaseModel(pydantic.BaseModel):

         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             extra: Any = pydantic.Extra.allow  # type: ignore
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(
+            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+        )

     def to_dict(
         self,
@@ -214,25 +214,25 @@ class BaseModel(pydantic.BaseModel):
             if key not in model_fields:
                 parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value

-                if PYDANTIC_V2:
-                    _extra[key] = parsed
-                else:
+                if PYDANTIC_V1:
                     _fields_set.add(key)
                     fields_values[key] = parsed
+                else:
+                    _extra[key] = parsed

         object.__setattr__(m, "__dict__", fields_values)

-        if PYDANTIC_V2:
-            # these properties are copied from Pydantic's `model_construct()` method
-            object.__setattr__(m, "__pydantic_private__", None)
-            object.__setattr__(m, "__pydantic_extra__", _extra)
-            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
-        else:
+        if PYDANTIC_V1:
             # init_private_attributes() does not exist in v2
             m._init_private_attributes()  # type: ignore

             # copied from Pydantic v1's `construct()` method
             object.__setattr__(m, "__fields_set__", _fields_set)
+        else:
+            # these properties are copied from Pydantic's `model_construct()` method
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)

         return m

@@ -242,7 +242,7 @@ class BaseModel(pydantic.BaseModel):
     # although not in practice
     model_construct = construct

-    if not PYDANTIC_V2:
+    if PYDANTIC_V1:
         # we define aliases for some of the new pydantic v2 methods so
         # that we can just document these methods without having to specify
         # a specific pydantic version as some users may not know which
@@ -255,7 +255,7 @@ class BaseModel(pydantic.BaseModel):
         mode: Literal["json", "python"] | str = "python",
         include: IncEx | None = None,
         exclude: IncEx | None = None,
-        by_alias: bool = False,
+        by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
@@ -263,6 +263,7 @@ class BaseModel(pydantic.BaseModel):
         warnings: bool | Literal["none", "warn", "error"] = True,
         context: dict[str, Any] | None = None,
         serialize_as_any: bool = False,
+        fallback: Callable[[Any], Any] | None = None,
     ) -> dict[str, Any]:
         """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump

@@ -294,16 +295,18 @@ class BaseModel(pydantic.BaseModel):
             raise ValueError("context is only supported in Pydantic v2")
         if serialize_as_any != False:
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
+        if fallback is not None:
+            raise ValueError("fallback is only supported in Pydantic v2")
         dumped = super().dict(  # pyright: ignore[reportDeprecated]
             include=include,
             exclude=exclude,
-            by_alias=by_alias,
+            by_alias=by_alias if by_alias is not None else False,
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             exclude_none=exclude_none,
         )

-        return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+        return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped

     @override
     def model_dump_json(
@@ -312,13 +315,14 @@ class BaseModel(pydantic.BaseModel):
         indent: int | None = None,
         include: IncEx | None = None,
         exclude: IncEx | None = None,
-        by_alias: bool = False,
+        by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
         round_trip: bool = False,
         warnings: bool | Literal["none", "warn", "error"] = True,
         context: dict[str, Any] | None = None,
+        fallback: Callable[[Any], Any] | None = None,
         serialize_as_any: bool = False,
     ) -> str:
         """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -347,11 +351,13 @@ class BaseModel(pydantic.BaseModel):
             raise ValueError("context is only supported in Pydantic v2")
         if serialize_as_any != False:
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
+        if fallback is not None:
+            raise ValueError("fallback is only supported in Pydantic v2")
         return super().json(  # type: ignore[reportDeprecated]
             indent=indent,
             include=include,
             exclude=exclude,
-            by_alias=by_alias,
+            by_alias=by_alias if by_alias is not None else False,
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             exclude_none=exclude_none,
@@ -362,10 +368,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
     if value is None:
         return field_get_default(field)

-    if PYDANTIC_V2:
-        type_ = field.annotation
-    else:
+    if PYDANTIC_V1:
         type_ = cast(type, field.outer_type_)  # type: ignore
+    else:
+        type_ = field.annotation  # type: ignore

     if type_ is None:
         raise RuntimeError(f"Unexpected field type is None for {key}")
@@ -374,7 +380,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:


 def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
-    if not PYDANTIC_V2:
+    if PYDANTIC_V1:
         # TODO
         return None

@@ -624,30 +630,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
     for variant in get_args(union):
         variant = strip_annotated_type(variant)
         if is_basemodel_type(variant):
-            if PYDANTIC_V2:
-                field = _extract_field_schema_pv2(variant, discriminator_field_name)
-                if not field:
+            if PYDANTIC_V1:
+                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                if not field_info:
                     continue

                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field.get("serialization_alias")
-
-                field_schema = field["schema"]
+                discriminator_alias = field_info.alias

-                if field_schema["type"] == "literal":
-                    for entry in cast("LiteralSchema", field_schema)["expected"]:
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
                         if isinstance(entry, str):
                             mapping[entry] = variant
             else:
-                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
-                if not field_info:
+                field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                if not field:
                     continue

                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field_info.alias
+                discriminator_alias = field.get("serialization_alias")

-                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
-                    for entry in get_args(annotation):
+                field_schema = field["schema"]
+
+                if field_schema["type"] == "literal":
+                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                         if isinstance(entry, str):
                             mapping[entry] = variant

@@ -710,7 +716,7 @@ else:
     pass


-if PYDANTIC_V2:
+if not PYDANTIC_V1:
     from pydantic import TypeAdapter as _TypeAdapter

     _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -778,12 +784,12 @@ class FinalRequestOptions(pydantic.BaseModel):
     json_data: Union[Body, None] = None
     extra_json: Union[AnyMapping, None] = None

-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
-    else:
+    if PYDANTIC_V1:

         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             arbitrary_types_allowed: bool = True
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)

     def get_max_retries(self, max_retries: int) -> int:
         if isinstance(self.max_retries, NotGiven):
@@ -816,9 +822,9 @@ class FinalRequestOptions(pydantic.BaseModel):
             key: strip_not_given(value)
             for key, value in values.items()
         }
-        if PYDANTIC_V2:
-            return super().model_construct(_fields_set, **kwargs)
-        return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        if PYDANTIC_V1:
+            return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        return super().model_construct(_fields_set, **kwargs)

     if not TYPE_CHECKING:
         # type checkers incorrectly complain about this assignment
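
Taken together, the _models.py hunks invert the compatibility check: branches that previously tested `PYDANTIC_V2` now test `PYDANTIC_V1`, with the legacy v1 path first and the Pydantic v2 path in the `else` arm, while the `model_dump`/`model_dump_json` shims gain a `fallback` parameter and a tri-state `by_alias: bool | None` default. The `_compat` module that defines the flag is not included in this diff; the sketch below only illustrates a common way such a flag is derived and uses assumed names.

```py
# Hypothetical sketch of a PYDANTIC_V1 flag; the real spitch/_compat.py is not
# shown in this diff and may differ.
import pydantic

PYDANTIC_V1 = pydantic.VERSION.startswith("1.")

if PYDANTIC_V1:
    ...  # legacy code paths: .dict(), .construct(), class Config
else:
    ...  # Pydantic v2 code paths: model_dump(), model_construct(), model_config
```
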
spitch/_qs.py CHANGED
@@ -4,7 +4,7 @@ from typing import Any, List, Tuple, Union, Mapping, TypeVar
 from urllib.parse import parse_qs, urlencode
 from typing_extensions import Literal, get_args

-from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._types import NotGiven, not_given
 from ._utils import flatten

 _T = TypeVar("_T")
@@ -41,8 +41,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> str:
         return urlencode(
             self.stringify_items(
@@ -56,8 +56,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> list[tuple[str, str]]:
         opts = Options(
             qs=self,
@@ -143,8 +143,8 @@ class Options:
         self,
         qs: Querystring = _qs,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> None:
         self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
         self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
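
In _qs.py the `NotGivenOr[...] = NOT_GIVEN` defaults become the equivalent `... | NotGiven = not_given` spelling; behaviour is unchanged, since `Options` still falls back to the `Querystring` instance's settings when the sentinel is passed. A small usage sketch follows (the concrete `ArrayFormat` value is an assumption, not taken from this diff):

```py
from spitch._qs import Querystring

qs = Querystring()
qs.stringify({"tags": ["a", "b"]})                        # sentinel default: uses qs.array_format
qs.stringify({"tags": ["a", "b"]}, array_format="comma")  # explicit override (format name assumed)
```
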
spitch/_types.py CHANGED
@@ -13,10 +13,21 @@ from typing import (
     Mapping,
     TypeVar,
     Callable,
+    Iterator,
     Optional,
     Sequence,
 )
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+    Set,
+    Literal,
+    Protocol,
+    TypeAlias,
+    TypedDict,
+    SupportsIndex,
+    overload,
+    override,
+    runtime_checkable,
+)

 import httpx
 import pydantic
@@ -106,18 +117,21 @@ class RequestOptions(TypedDict, total=False):
 # Sentinel class used until PEP 0661 is accepted
 class NotGiven:
     """
-    A sentinel singleton class used to distinguish omitted keyword arguments
-    from those passed in with the value None (which may have different behavior).
+    For parameters with a meaningful None value, we need to distinguish between
+    the user explicitly passing None, and the user not passing the parameter at
+    all.
+
+    User code shouldn't need to use not_given directly.

     For example:

     ```py
-    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+    def create(timeout: Timeout | None | NotGiven = not_given): ...


-    get(timeout=1)  # 1s timeout
-    get(timeout=None)  # No timeout
-    get()  # Default timeout behavior, which may not be statically known at the method definition.
+    create(timeout=1)  # 1s timeout
+    create(timeout=None)  # No timeout
+    create()  # Default timeout behavior
     ```
     """

@@ -129,13 +143,14 @@ class NotGiven:
         return "NOT_GIVEN"


-NotGivenOr = Union[_T, NotGiven]
+not_given = NotGiven()
+# for backwards compatibility:
 NOT_GIVEN = NotGiven()


 class Omit:
-    """In certain situations you need to be able to represent a case where a default value has
-    to be explicitly removed and `None` is not an appropriate substitute, for example:
+    """
+    To explicitly omit something from being sent in a request, use `omit`.

     ```py
     # as the default `Content-Type` header is `application/json` that will be sent
@@ -145,8 +160,8 @@ class Omit:
     # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
     client.post(..., headers={"Content-Type": "multipart/form-data"})

-    # instead you can remove the default `application/json` header by passing Omit
-    client.post(..., headers={"Content-Type": Omit()})
+    # instead you can remove the default `application/json` header by passing omit
+    client.post(..., headers={"Content-Type": omit})
     ```
     """

@@ -154,6 +169,9 @@ class Omit:
         return False


+omit = Omit()
+
+
 @runtime_checkable
 class ModelBuilderProtocol(Protocol):
     @classmethod
@@ -219,3 +237,26 @@ class _GenericAlias(Protocol):
 class HttpxSendArgs(TypedDict, total=False):
     auth: httpx.Auth
     follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+    # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+    # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+    class SequenceNotStr(Protocol[_T_co]):
+        @overload
+        def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+        @overload
+        def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+        def __contains__(self, value: object, /) -> bool: ...
+        def __len__(self) -> int: ...
+        def __iter__(self) -> Iterator[_T_co]: ...
+        def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+        def count(self, value: Any, /) -> int: ...
+        def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+    # just point this to a normal `Sequence` at runtime to avoid having to special case
+    # deserializing our custom sequence type
+    SequenceNotStr = Sequence
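
_types.py now exposes `not_given` and `omit` as module-level sentinel instances (keeping `NOT_GIVEN` for backwards compatibility while dropping `NotGivenOr`) and adds `SequenceNotStr`, a typing-only protocol that accepts sequences but not bare strings. A minimal sketch of the sentinel pattern, using a hypothetical function that is not part of spitch:

```py
from spitch._types import NotGiven, not_given


def request(timeout: float | None | NotGiven = not_given) -> str:
    # distinguish "argument omitted" from an explicit None
    if isinstance(timeout, NotGiven):
        return "default timeout"
    if timeout is None:
        return "no timeout"
    return f"timeout={timeout}"


print(request())              # default timeout
print(request(timeout=None))  # no timeout
print(request(timeout=2.5))   # timeout=2.5
```
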
spitch/_utils/__init__.py CHANGED
@@ -10,7 +10,6 @@ from ._utils import (
     lru_cache as lru_cache,
     is_mapping as is_mapping,
     is_tuple_t as is_tuple_t,
-    parse_date as parse_date,
     is_iterable as is_iterable,
     is_sequence as is_sequence,
     coerce_float as coerce_float,
@@ -23,7 +22,6 @@ from ._utils import (
     coerce_boolean as coerce_boolean,
     coerce_integer as coerce_integer,
     file_from_path as file_from_path,
-    parse_datetime as parse_datetime,
     strip_not_given as strip_not_given,
     deepcopy_minimal as deepcopy_minimal,
     get_async_library as get_async_library,
@@ -32,12 +30,20 @@ from ._utils import (
     maybe_coerce_boolean as maybe_coerce_boolean,
     maybe_coerce_integer as maybe_coerce_integer,
 )
+from ._compat import (
+    get_args as get_args,
+    is_union as is_union,
+    get_origin as get_origin,
+    is_typeddict as is_typeddict,
+    is_literal_type as is_literal_type,
+)
 from ._typing import (
     is_list_type as is_list_type,
     is_union_type as is_union_type,
     extract_type_arg as extract_type_arg,
     is_iterable_type as is_iterable_type,
     is_required_type as is_required_type,
+    is_sequence_type as is_sequence_type,
     is_annotated_type as is_annotated_type,
     strip_annotated_type as strip_annotated_type,
     extract_type_var_from_base as extract_type_var_from_base,
@@ -54,3 +60,4 @@ from ._reflection import (
     function_has_argument as function_has_argument,
     assert_signatures_in_sync as assert_signatures_in_sync,
 )
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
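
`parse_date` and `parse_datetime` are now re-exported from the new `._datetime_parse` module instead of `._utils`, and several typing helpers come from a new `._compat` shim, so the public `spitch._utils` import path keeps working. A sketch of that continuity (package layout assumed from the re-exports above):

```py
# Old call sites keep working even though the implementation moved.
from spitch._utils import parse_date, parse_datetime

print(parse_date("2020-01-02"))  # 2020-01-02
```
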
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+    return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+    return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+    if sys.version_info < (3, 10):
+        return tp is Union  # type: ignore[comparison-overlap]
+    else:
+        import types
+
+        return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+    return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+    return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    return _parse_datetime(value)
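
This added module is not named in the diff, but its relative imports (`from .._types`, `from ._datetime_parse`) and the `from ._compat import ...` block in `_utils/__init__.py` suggest it is `spitch/_utils/_compat.py`: thin wrappers over `typing_extensions` plus re-exports of the vendored date parsers. A usage sketch under that assumption, going through the `spitch._utils` re-exports:

```py
from typing import Literal, Union

from spitch._utils import get_args, get_origin, is_literal_type, is_union

Kind = Literal["user", "assistant"]
print(is_literal_type(Kind))                  # True
print(get_args(Kind))                         # ('user', 'assistant')
print(is_union(get_origin(Union[int, str])))  # True
```
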
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = _get_numeric(value, "datetime")
+    if number is not None:
+        return _from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+
+    match = datetime_re.match(value)
+    if match is None:
+        raise ValueError("invalid datetime format")
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"))
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    return datetime(**kw_)  # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = _get_numeric(value, "date")
+    if number is not None:
+        return _from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+    match = date_re.match(value)
+    if match is None:
+        raise ValueError("invalid date format")
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise ValueError("invalid date format") from None
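
The second added module likewise has no filename in the diff; its docstring identifies it as code vendored from Pydantic v1's `datetime_parse.py`, and the `._datetime_parse` imports elsewhere in this diff suggest it lives at `spitch/_utils/_datetime_parse.py`. It accepts ISO-8601-style strings, bytes, and unix timestamps (seconds, or milliseconds above `MS_WATERSHED`). A short behavioural sketch, assuming the parsers are reached via the `spitch._utils` re-exports shown earlier:

```py
from spitch._utils import parse_date, parse_datetime

print(parse_datetime("2024-05-01T12:30:00+02:00"))  # 2024-05-01 12:30:00+02:00
print(parse_datetime(0))                            # 1970-01-01 00:00:00+00:00 (unix seconds)
print(parse_datetime(1_700_000_000_000))            # > MS_WATERSHED, so treated as milliseconds
print(parse_date("2024-05-01"))                     # 2024-05-01
```
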