isaacus 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Files changed (36)
  1. isaacus/__init__.py +3 -1
  2. isaacus/_base_client.py +40 -12
  3. isaacus/_client.py +17 -9
  4. isaacus/_compat.py +48 -48
  5. isaacus/_files.py +4 -4
  6. isaacus/_models.py +51 -45
  7. isaacus/_qs.py +7 -7
  8. isaacus/_types.py +53 -12
  9. isaacus/_utils/__init__.py +9 -2
  10. isaacus/_utils/_compat.py +45 -0
  11. isaacus/_utils/_datetime_parse.py +136 -0
  12. isaacus/_utils/_transform.py +13 -3
  13. isaacus/_utils/_typing.py +6 -1
  14. isaacus/_utils/_utils.py +4 -5
  15. isaacus/_version.py +1 -1
  16. isaacus/resources/__init__.py +14 -0
  17. isaacus/resources/classifications/universal.py +17 -17
  18. isaacus/resources/embeddings.py +246 -0
  19. isaacus/resources/extractions/qa.py +23 -21
  20. isaacus/resources/rerankings.py +19 -19
  21. isaacus/types/__init__.py +3 -1
  22. isaacus/types/classifications/__init__.py +1 -1
  23. isaacus/types/classifications/{universal_classification.py → universal_classification_response.py} +2 -2
  24. isaacus/types/classifications/universal_create_params.py +4 -2
  25. isaacus/types/embedding_create_params.py +49 -0
  26. isaacus/types/embedding_response.py +31 -0
  27. isaacus/types/extractions/__init__.py +1 -1
  28. isaacus/types/extractions/{answer_extraction.py → answer_extraction_response.py} +2 -2
  29. isaacus/types/extractions/qa_create_params.py +7 -4
  30. isaacus/types/reranking_create_params.py +4 -2
  31. isaacus/types/{reranking.py → reranking_response.py} +2 -2
  32. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/METADATA +53 -40
  33. isaacus-0.9.0.dist-info/RECORD +52 -0
  34. isaacus-0.8.0.dist-info/RECORD +0 -47
  35. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/WHEEL +0 -0
  36. {isaacus-0.8.0.dist-info → isaacus-0.9.0.dist-info}/licenses/LICENSE +0 -0
isaacus/_models.py CHANGED
@@ -50,7 +50,7 @@ from ._utils import (
     strip_annotated_type,
 )
 from ._compat import (
-    PYDANTIC_V2,
+    PYDANTIC_V1,
     ConfigDict,
     GenericModel as BaseGenericModel,
     get_args,
@@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol):
 
 
 class BaseModel(pydantic.BaseModel):
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(
-            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
-        )
-    else:
+    if PYDANTIC_V1:
 
         @property
         @override
@@ -95,6 +91,10 @@ class BaseModel(pydantic.BaseModel):
 
         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             extra: Any = pydantic.Extra.allow  # type: ignore
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(
+            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+        )
 
     def to_dict(
         self,
@@ -215,25 +215,25 @@ class BaseModel(pydantic.BaseModel):
             if key not in model_fields:
                 parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
 
-                if PYDANTIC_V2:
-                    _extra[key] = parsed
-                else:
+                if PYDANTIC_V1:
                     _fields_set.add(key)
                     fields_values[key] = parsed
+                else:
+                    _extra[key] = parsed
 
         object.__setattr__(m, "__dict__", fields_values)
 
-        if PYDANTIC_V2:
-            # these properties are copied from Pydantic's `model_construct()` method
-            object.__setattr__(m, "__pydantic_private__", None)
-            object.__setattr__(m, "__pydantic_extra__", _extra)
-            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
-        else:
+        if PYDANTIC_V1:
             # init_private_attributes() does not exist in v2
             m._init_private_attributes()  # type: ignore
 
             # copied from Pydantic v1's `construct()` method
             object.__setattr__(m, "__fields_set__", _fields_set)
+        else:
+            # these properties are copied from Pydantic's `model_construct()` method
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
 
         return m
 
@@ -243,7 +243,7 @@ class BaseModel(pydantic.BaseModel):
     # although not in practice
     model_construct = construct
 
-    if not PYDANTIC_V2:
+    if PYDANTIC_V1:
         # we define aliases for some of the new pydantic v2 methods so
         # that we can just document these methods without having to specify
        # a specific pydantic version as some users may not know which
@@ -256,7 +256,7 @@ class BaseModel(pydantic.BaseModel):
             mode: Literal["json", "python"] | str = "python",
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
@@ -264,6 +264,7 @@ class BaseModel(pydantic.BaseModel):
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
             serialize_as_any: bool = False,
+            fallback: Callable[[Any], Any] | None = None,
         ) -> dict[str, Any]:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
 
@@ -295,16 +296,18 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             dumped = super().dict(  # pyright: ignore[reportDeprecated]
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
             )
 
-            return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+            return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
 
         @override
         def model_dump_json(
@@ -313,13 +316,14 @@ class BaseModel(pydantic.BaseModel):
             indent: int | None = None,
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
             round_trip: bool = False,
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
+            fallback: Callable[[Any], Any] | None = None,
             serialize_as_any: bool = False,
         ) -> str:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -348,11 +352,13 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             return super().json(  # type: ignore[reportDeprecated]
                 indent=indent,
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
@@ -363,10 +369,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
     if value is None:
         return field_get_default(field)
 
-    if PYDANTIC_V2:
-        type_ = field.annotation
-    else:
+    if PYDANTIC_V1:
         type_ = cast(type, field.outer_type_)  # type: ignore
+    else:
+        type_ = field.annotation  # type: ignore
 
     if type_ is None:
         raise RuntimeError(f"Unexpected field type is None for {key}")
@@ -375,7 +381,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
 
 
 def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
-    if not PYDANTIC_V2:
+    if PYDANTIC_V1:
         # TODO
         return None
 
@@ -628,30 +634,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
     for variant in get_args(union):
         variant = strip_annotated_type(variant)
         if is_basemodel_type(variant):
-            if PYDANTIC_V2:
-                field = _extract_field_schema_pv2(variant, discriminator_field_name)
-                if not field:
+            if PYDANTIC_V1:
+                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                if not field_info:
                     continue
 
                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field.get("serialization_alias")
-
-                field_schema = field["schema"]
+                discriminator_alias = field_info.alias
 
-                if field_schema["type"] == "literal":
-                    for entry in cast("LiteralSchema", field_schema)["expected"]:
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
                         if isinstance(entry, str):
                             mapping[entry] = variant
             else:
-                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
-                if not field_info:
+                field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                if not field:
                     continue
 
                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field_info.alias
+                discriminator_alias = field.get("serialization_alias")
 
-                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
-                    for entry in get_args(annotation):
+                field_schema = field["schema"]
+
+                if field_schema["type"] == "literal":
+                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                         if isinstance(entry, str):
                             mapping[entry] = variant
 
@@ -714,7 +720,7 @@ else:
         pass
 
 
-if PYDANTIC_V2:
+if not PYDANTIC_V1:
     from pydantic import TypeAdapter as _TypeAdapter
 
     _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -782,12 +788,12 @@ class FinalRequestOptions(pydantic.BaseModel):
     json_data: Union[Body, None] = None
     extra_json: Union[AnyMapping, None] = None
 
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
-    else:
+    if PYDANTIC_V1:
 
         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             arbitrary_types_allowed: bool = True
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
 
     def get_max_retries(self, max_retries: int) -> int:
         if isinstance(self.max_retries, NotGiven):
@@ -820,9 +826,9 @@ class FinalRequestOptions(pydantic.BaseModel):
             key: strip_not_given(value)
             for key, value in values.items()
         }
-        if PYDANTIC_V2:
-            return super().model_construct(_fields_set, **kwargs)
-        return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        if PYDANTIC_V1:
+            return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        return super().model_construct(_fields_set, **kwargs)
 
     if not TYPE_CHECKING:
         # type checkers incorrectly complain about this assignment
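The recurring change in this file inverts the Pydantic version guard: code now branches on PYDANTIC_V1, with Pydantic v2 handled in the default branch. A minimal sketch of the pattern, assuming the flag is derived from pydantic.VERSION (isaacus._compat itself is not shown in this diff):

```py
# Sketch only: PYDANTIC_V1 stands in for isaacus._compat.PYDANTIC_V1, which this
# diff imports but does not show being defined.
import pydantic

PYDANTIC_V1 = pydantic.VERSION.startswith("1.")  # assumption about how the flag is computed


def dump(model: pydantic.BaseModel) -> dict:
    if PYDANTIC_V1:
        # Pydantic v1 keeps using the legacy API, mirroring the `if PYDANTIC_V1:` branches above.
        return model.dict()
    # Pydantic v2+ is now the fall-through/default branch.
    return model.model_dump()
```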
isaacus/_qs.py CHANGED
@@ -4,7 +4,7 @@ from typing import Any, List, Tuple, Union, Mapping, TypeVar
 from urllib.parse import parse_qs, urlencode
 from typing_extensions import Literal, get_args
 
-from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._types import NotGiven, not_given
 from ._utils import flatten
 
 _T = TypeVar("_T")
@@ -41,8 +41,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> str:
         return urlencode(
             self.stringify_items(
@@ -56,8 +56,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> list[tuple[str, str]]:
         opts = Options(
             qs=self,
@@ -143,8 +143,8 @@ class Options:
         self,
         qs: Querystring = _qs,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> None:
         self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
         self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
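All three signatures switch from `NotGivenOr[X] = NOT_GIVEN` to the equivalent `X | NotGiven = not_given` spelling. A self-contained sketch of how such a sentinel default resolves, using simplified names rather than the SDK's real classes:

```py
from __future__ import annotations


class NotGiven:
    def __bool__(self) -> bool:
        return False


not_given = NotGiven()


def stringify(params: dict, *, array_format: str | NotGiven = not_given) -> str:
    # Fall back to a library-level default only when the caller omitted the argument,
    # mirroring Options.__init__ above.
    fmt = "comma" if isinstance(array_format, NotGiven) else array_format
    return f"{fmt}:{params}"
```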
isaacus/_types.py CHANGED
@@ -13,10 +13,21 @@ from typing import (
     Mapping,
     TypeVar,
     Callable,
+    Iterator,
     Optional,
     Sequence,
 )
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+    Set,
+    Literal,
+    Protocol,
+    TypeAlias,
+    TypedDict,
+    SupportsIndex,
+    overload,
+    override,
+    runtime_checkable,
+)
 
 import httpx
 import pydantic
@@ -106,18 +117,21 @@ class RequestOptions(TypedDict, total=False):
 # Sentinel class used until PEP 0661 is accepted
 class NotGiven:
     """
-    A sentinel singleton class used to distinguish omitted keyword arguments
-    from those passed in with the value None (which may have different behavior).
+    For parameters with a meaningful None value, we need to distinguish between
+    the user explicitly passing None, and the user not passing the parameter at
+    all.
+
+    User code shouldn't need to use not_given directly.
 
     For example:
 
     ```py
-    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+    def create(timeout: Timeout | None | NotGiven = not_given): ...
 
 
-    get(timeout=1)  # 1s timeout
-    get(timeout=None)  # No timeout
-    get()  # Default timeout behavior, which may not be statically known at the method definition.
+    create(timeout=1)  # 1s timeout
+    create(timeout=None)  # No timeout
+    create()  # Default timeout behavior
     ```
     """
 
@@ -129,13 +143,14 @@ class NotGiven:
         return "NOT_GIVEN"
 
 
-NotGivenOr = Union[_T, NotGiven]
+not_given = NotGiven()
+# for backwards compatibility:
 NOT_GIVEN = NotGiven()
 
 
 class Omit:
-    """In certain situations you need to be able to represent a case where a default value has
-    to be explicitly removed and `None` is not an appropriate substitute, for example:
+    """
+    To explicitly omit something from being sent in a request, use `omit`.
 
     ```py
     # as the default `Content-Type` header is `application/json` that will be sent
@@ -145,8 +160,8 @@ class Omit:
     # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
     client.post(..., headers={"Content-Type": "multipart/form-data"})
 
-    # instead you can remove the default `application/json` header by passing Omit
-    client.post(..., headers={"Content-Type": Omit()})
+    # instead you can remove the default `application/json` header by passing omit
+    client.post(..., headers={"Content-Type": omit})
     ```
     """
 
@@ -154,6 +169,9 @@ class Omit:
         return False
 
 
+omit = Omit()
+
+
 @runtime_checkable
 class ModelBuilderProtocol(Protocol):
     @classmethod
@@ -217,3 +235,26 @@ class _GenericAlias(Protocol):
 class HttpxSendArgs(TypedDict, total=False):
     auth: httpx.Auth
     follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+    # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+    # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+    class SequenceNotStr(Protocol[_T_co]):
+        @overload
+        def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+        @overload
+        def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+        def __contains__(self, value: object, /) -> bool: ...
+        def __len__(self) -> int: ...
+        def __iter__(self) -> Iterator[_T_co]: ...
+        def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+        def count(self, value: Any, /) -> int: ...
+        def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+    # just point this to a normal `Sequence` at runtime to avoid having to special case
+    # deserializing our custom sequence type
+    SequenceNotStr = Sequence
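The new SequenceNotStr protocol accepts list- and tuple-like sequences of strings while rejecting a bare str at type-check time, because str.__contains__ does not accept object (per the comment above). An illustrative use, assuming the internal isaacus._types module is importable (it is not public API):

```py
from isaacus._types import SequenceNotStr  # internal module; used here for illustration only


def join_texts(texts: SequenceNotStr[str]) -> str:
    return " ".join(texts)


join_texts(["a", "b"])  # accepted: list[str] satisfies the protocol
join_texts(("a", "b"))  # accepted: tuples satisfy it too
join_texts("ab")        # rejected by type checkers; still runs, since the runtime alias is plain Sequence
```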
isaacus/_utils/__init__.py CHANGED
@@ -10,7 +10,6 @@ from ._utils import (
     lru_cache as lru_cache,
     is_mapping as is_mapping,
     is_tuple_t as is_tuple_t,
-    parse_date as parse_date,
     is_iterable as is_iterable,
     is_sequence as is_sequence,
     coerce_float as coerce_float,
@@ -23,7 +22,6 @@ from ._utils import (
     coerce_boolean as coerce_boolean,
     coerce_integer as coerce_integer,
     file_from_path as file_from_path,
-    parse_datetime as parse_datetime,
     strip_not_given as strip_not_given,
     deepcopy_minimal as deepcopy_minimal,
     get_async_library as get_async_library,
@@ -32,12 +30,20 @@ from ._utils import (
     maybe_coerce_boolean as maybe_coerce_boolean,
     maybe_coerce_integer as maybe_coerce_integer,
 )
+from ._compat import (
+    get_args as get_args,
+    is_union as is_union,
+    get_origin as get_origin,
+    is_typeddict as is_typeddict,
+    is_literal_type as is_literal_type,
+)
 from ._typing import (
     is_list_type as is_list_type,
     is_union_type as is_union_type,
     extract_type_arg as extract_type_arg,
     is_iterable_type as is_iterable_type,
     is_required_type as is_required_type,
+    is_sequence_type as is_sequence_type,
     is_annotated_type as is_annotated_type,
     is_type_alias_type as is_type_alias_type,
     strip_annotated_type as strip_annotated_type,
@@ -55,3 +61,4 @@ from ._reflection import (
     function_has_argument as function_has_argument,
     assert_signatures_in_sync as assert_signatures_in_sync,
 )
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
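Because parse_date and parse_datetime are re-exported here under their original names, existing imports from isaacus._utils keep resolving even though the implementation moved. A quick sketch of that equivalence (the identity assertion assumes the re-export is a plain alias, as shown above):

```py
from isaacus._utils import parse_date, parse_datetime  # unchanged import path
from isaacus._utils._datetime_parse import parse_datetime as _impl  # new home of the implementation

assert parse_datetime is _impl  # assumption: the re-export is a plain alias of the moved function
assert parse_date("2024-01-02").year == 2024
```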
isaacus/_utils/_compat.py ADDED
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+    return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+    return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+    if sys.version_info < (3, 10):
+        return tp is Union  # type: ignore[comparison-overlap]
+    else:
+        import types
+
+        return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+    return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+    return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    return _parse_datetime(value)
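These helpers reproduce a small slice of type introspection that previously came through Pydantic's compatibility layer. Their expected behaviour, inferred from the definitions above (illustrative only):

```py
from typing import Literal, Union

from typing_extensions import get_origin

from isaacus._utils._compat import is_literal_type, is_union  # internal module; illustration only

assert is_union(get_origin(Union[int, str]))  # the origin of a Union annotation is typing.Union
assert is_literal_type(Literal["a", "b"])     # get_origin(...) is Literal, which is in _LITERAL_TYPES
assert not is_literal_type(int)               # get_origin(int) is None
```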
isaacus/_utils/_datetime_parse.py ADDED
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = _get_numeric(value, "datetime")
+    if number is not None:
+        return _from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+
+    match = datetime_re.match(value)
+    if match is None:
+        raise ValueError("invalid datetime format")
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"))
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    return datetime(**kw_)  # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = _get_numeric(value, "date")
+    if number is not None:
+        return _from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+    match = date_re.match(value)
+    if match is None:
+        raise ValueError("invalid date format")
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise ValueError("invalid date format") from None
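A few examples of what these vendored parsers accept, worked out from the regexes and the unix-seconds branch above (illustrative; not taken from the package's tests):

```py
from datetime import datetime, timedelta, timezone

from isaacus._utils._datetime_parse import parse_date, parse_datetime

dt = parse_datetime("2024-01-02T03:04:05.5+05:30")
assert dt.tzinfo == timezone(timedelta(hours=5, minutes=30))  # offset captured by _parse_timezone
assert dt.microsecond == 500_000                              # ".5" is right-padded to six digits

assert parse_datetime(0) == datetime(1970, 1, 1, tzinfo=timezone.utc)  # numeric input is treated as unix seconds
assert parse_date("2024-01-02").isoformat() == "2024-01-02"
```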