isaacus 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (36)
  1. isaacus/__init__.py +5 -2
  2. isaacus/_base_client.py +86 -15
  3. isaacus/_client.py +17 -9
  4. isaacus/_compat.py +48 -48
  5. isaacus/_files.py +4 -4
  6. isaacus/_models.py +80 -50
  7. isaacus/_qs.py +7 -7
  8. isaacus/_types.py +53 -12
  9. isaacus/_utils/__init__.py +9 -2
  10. isaacus/_utils/_compat.py +45 -0
  11. isaacus/_utils/_datetime_parse.py +136 -0
  12. isaacus/_utils/_transform.py +13 -3
  13. isaacus/_utils/_typing.py +6 -1
  14. isaacus/_utils/_utils.py +4 -5
  15. isaacus/_version.py +1 -1
  16. isaacus/resources/__init__.py +14 -0
  17. isaacus/resources/classifications/universal.py +17 -17
  18. isaacus/resources/embeddings.py +246 -0
  19. isaacus/resources/extractions/qa.py +23 -21
  20. isaacus/resources/rerankings.py +19 -19
  21. isaacus/types/__init__.py +3 -1
  22. isaacus/types/classifications/__init__.py +1 -1
  23. isaacus/types/classifications/{universal_classification.py → universal_classification_response.py} +2 -2
  24. isaacus/types/classifications/universal_create_params.py +4 -2
  25. isaacus/types/embedding_create_params.py +49 -0
  26. isaacus/types/embedding_response.py +31 -0
  27. isaacus/types/extractions/__init__.py +1 -1
  28. isaacus/types/extractions/{answer_extraction.py → answer_extraction_response.py} +2 -2
  29. isaacus/types/extractions/qa_create_params.py +7 -4
  30. isaacus/types/reranking_create_params.py +4 -2
  31. isaacus/types/{reranking.py → reranking_response.py} +2 -2
  32. {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/METADATA +90 -37
  33. isaacus-0.9.0.dist-info/RECORD +52 -0
  34. isaacus-0.7.0.dist-info/RECORD +0 -47
  35. {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/WHEEL +0 -0
  36. {isaacus-0.7.0.dist-info → isaacus-0.9.0.dist-info}/licenses/LICENSE +0 -0
isaacus/_models.py CHANGED
@@ -2,9 +2,10 @@ from __future__ import annotations
 
 import os
 import inspect
-from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
 from datetime import date, datetime
 from typing_extensions import (
+    List,
     Unpack,
     Literal,
     ClassVar,
@@ -49,7 +50,7 @@ from ._utils import (
     strip_annotated_type,
 )
 from ._compat import (
-    PYDANTIC_V2,
+    PYDANTIC_V1,
     ConfigDict,
     GenericModel as BaseGenericModel,
     get_args,
@@ -80,11 +81,7 @@ class _ConfigProtocol(Protocol):
 
 
 class BaseModel(pydantic.BaseModel):
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(
-            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
-        )
-    else:
+    if PYDANTIC_V1:
 
         @property
         @override
@@ -94,6 +91,10 @@ class BaseModel(pydantic.BaseModel):
 
         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             extra: Any = pydantic.Extra.allow  # type: ignore
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(
+            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+        )
 
     def to_dict(
         self,
@@ -207,28 +208,32 @@ class BaseModel(pydantic.BaseModel):
             else:
                 fields_values[name] = field_get_default(field)
 
+        extra_field_type = _get_extra_fields_type(__cls)
+
         _extra = {}
         for key, value in values.items():
             if key not in model_fields:
-                if PYDANTIC_V2:
-                    _extra[key] = value
-                else:
+                parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
+
+                if PYDANTIC_V1:
                     _fields_set.add(key)
-                    fields_values[key] = value
+                    fields_values[key] = parsed
+                else:
+                    _extra[key] = parsed
 
         object.__setattr__(m, "__dict__", fields_values)
 
-        if PYDANTIC_V2:
-            # these properties are copied from Pydantic's `model_construct()` method
-            object.__setattr__(m, "__pydantic_private__", None)
-            object.__setattr__(m, "__pydantic_extra__", _extra)
-            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
-        else:
+        if PYDANTIC_V1:
             # init_private_attributes() does not exist in v2
             m._init_private_attributes()  # type: ignore
 
            # copied from Pydantic v1's `construct()` method
            object.__setattr__(m, "__fields_set__", _fields_set)
+        else:
+            # these properties are copied from Pydantic's `model_construct()` method
+            object.__setattr__(m, "__pydantic_private__", None)
+            object.__setattr__(m, "__pydantic_extra__", _extra)
+            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
 
         return m
 
@@ -238,7 +243,7 @@ class BaseModel(pydantic.BaseModel):
         # although not in practice
         model_construct = construct
 
-    if not PYDANTIC_V2:
+    if PYDANTIC_V1:
         # we define aliases for some of the new pydantic v2 methods so
         # that we can just document these methods without having to specify
         # a specific pydantic version as some users may not know which
@@ -251,7 +256,7 @@ class BaseModel(pydantic.BaseModel):
             mode: Literal["json", "python"] | str = "python",
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
@@ -259,6 +264,7 @@ class BaseModel(pydantic.BaseModel):
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
             serialize_as_any: bool = False,
+            fallback: Callable[[Any], Any] | None = None,
         ) -> dict[str, Any]:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
 
@@ -290,16 +296,18 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             dumped = super().dict(  # pyright: ignore[reportDeprecated]
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
             )
 
-            return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+            return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
 
         @override
         def model_dump_json(
@@ -308,13 +316,14 @@ class BaseModel(pydantic.BaseModel):
             indent: int | None = None,
             include: IncEx | None = None,
             exclude: IncEx | None = None,
-            by_alias: bool = False,
+            by_alias: bool | None = None,
             exclude_unset: bool = False,
             exclude_defaults: bool = False,
             exclude_none: bool = False,
             round_trip: bool = False,
             warnings: bool | Literal["none", "warn", "error"] = True,
             context: dict[str, Any] | None = None,
+            fallback: Callable[[Any], Any] | None = None,
             serialize_as_any: bool = False,
         ) -> str:
             """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -343,11 +352,13 @@ class BaseModel(pydantic.BaseModel):
                 raise ValueError("context is only supported in Pydantic v2")
             if serialize_as_any != False:
                 raise ValueError("serialize_as_any is only supported in Pydantic v2")
+            if fallback is not None:
+                raise ValueError("fallback is only supported in Pydantic v2")
             return super().json(  # type: ignore[reportDeprecated]
                 indent=indent,
                 include=include,
                 exclude=exclude,
-                by_alias=by_alias,
+                by_alias=by_alias if by_alias is not None else False,
                 exclude_unset=exclude_unset,
                 exclude_defaults=exclude_defaults,
                 exclude_none=exclude_none,
@@ -358,15 +369,32 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
     if value is None:
         return field_get_default(field)
 
-    if PYDANTIC_V2:
-        type_ = field.annotation
-    else:
+    if PYDANTIC_V1:
         type_ = cast(type, field.outer_type_)  # type: ignore
+    else:
+        type_ = field.annotation  # type: ignore
 
     if type_ is None:
         raise RuntimeError(f"Unexpected field type is None for {key}")
 
-    return construct_type(value=value, type_=type_)
+    return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
+
+
+def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
+    if PYDANTIC_V1:
+        # TODO
+        return None
+
+    schema = cls.__pydantic_core_schema__
+    if schema["type"] == "model":
+        fields = schema["schema"]
+        if fields["type"] == "model-fields":
+            extras = fields.get("extras_schema")
+            if extras and "cls" in extras:
+                # mypy can't narrow the type
+                return extras["cls"]  # type: ignore[no-any-return]
+
+    return None
 
 
 def is_basemodel(type_: type) -> bool:
@@ -420,7 +448,7 @@ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
     return cast(_T, construct_type(value=value, type_=type_))
 
 
-def construct_type(*, value: object, type_: object) -> object:
+def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
     """Loose coercion to the expected type with construction of nested values.
 
     If the given value does not match the expected type then it is returned as-is.
@@ -438,8 +466,10 @@ def construct_type(*, value: object, type_: object) -> object:
         type_ = type_.__value__  # type: ignore[unreachable]
 
     # unwrap `Annotated[T, ...]` -> `T`
-    if is_annotated_type(type_):
-        meta: tuple[Any, ...] = get_args(type_)[1:]
+    if metadata is not None and len(metadata) > 0:
+        meta: tuple[Any, ...] = tuple(metadata)
+    elif is_annotated_type(type_):
+        meta = get_args(type_)[1:]
         type_ = extract_type_arg(type_, 0)
     else:
         meta = tuple()
@@ -604,30 +634,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
     for variant in get_args(union):
         variant = strip_annotated_type(variant)
         if is_basemodel_type(variant):
-            if PYDANTIC_V2:
-                field = _extract_field_schema_pv2(variant, discriminator_field_name)
-                if not field:
+            if PYDANTIC_V1:
+                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+                if not field_info:
                     continue
 
                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field.get("serialization_alias")
-
-                field_schema = field["schema"]
+                discriminator_alias = field_info.alias
 
-                if field_schema["type"] == "literal":
-                    for entry in cast("LiteralSchema", field_schema)["expected"]:
+                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+                    for entry in get_args(annotation):
                         if isinstance(entry, str):
                             mapping[entry] = variant
             else:
-                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
-                if not field_info:
+                field = _extract_field_schema_pv2(variant, discriminator_field_name)
+                if not field:
                     continue
 
                 # Note: if one variant defines an alias then they all should
-                discriminator_alias = field_info.alias
+                discriminator_alias = field.get("serialization_alias")
 
-                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
-                    for entry in get_args(annotation):
+                field_schema = field["schema"]
+
+                if field_schema["type"] == "literal":
+                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                         if isinstance(entry, str):
                             mapping[entry] = variant
 
@@ -690,7 +720,7 @@ else:
         pass
 
 
-if PYDANTIC_V2:
+if not PYDANTIC_V1:
     from pydantic import TypeAdapter as _TypeAdapter
 
     _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -758,12 +788,12 @@ class FinalRequestOptions(pydantic.BaseModel):
     json_data: Union[Body, None] = None
     extra_json: Union[AnyMapping, None] = None
 
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
-    else:
+    if PYDANTIC_V1:
 
         class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
             arbitrary_types_allowed: bool = True
+    else:
+        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
 
     def get_max_retries(self, max_retries: int) -> int:
         if isinstance(self.max_retries, NotGiven):
@@ -796,9 +826,9 @@ class FinalRequestOptions(pydantic.BaseModel):
             key: strip_not_given(value)
             for key, value in values.items()
         }
-        if PYDANTIC_V2:
-            return super().model_construct(_fields_set, **kwargs)
-        return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        if PYDANTIC_V1:
+            return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]
+        return super().model_construct(_fields_set, **kwargs)
 
     if not TYPE_CHECKING:
         # type checkers incorrectly complain about this assignment
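Note: the `_models.py` changes flip the version check from `PYDANTIC_V2` to `PYDANTIC_V1`, making Pydantic v2 the default branch, and `construct()` now routes unknown keys through the model's typed extras schema when one exists. A minimal sketch of the externally visible behaviour, assuming Pydantic v2 is installed; `Example` is a hypothetical model, not part of the SDK:

```py
from isaacus._models import BaseModel


class Example(BaseModel):
    name: str


# construct() never validates, and unknown keys are preserved: on Pydantic v2
# they are stored via __pydantic_extra__, on v1 directly in __dict__.
m = Example.construct(name="contract", unknown_field=123)
print(m.name)           # contract
print(m.unknown_field)  # 123
```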
isaacus/_qs.py CHANGED
@@ -4,7 +4,7 @@ from typing import Any, List, Tuple, Union, Mapping, TypeVar
 from urllib.parse import parse_qs, urlencode
 from typing_extensions import Literal, get_args
 
-from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._types import NotGiven, not_given
 from ._utils import flatten
 
 _T = TypeVar("_T")
@@ -41,8 +41,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> str:
         return urlencode(
             self.stringify_items(
@@ -56,8 +56,8 @@ class Querystring:
         self,
         params: Params,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> list[tuple[str, str]]:
         opts = Options(
             qs=self,
@@ -143,8 +143,8 @@ class Options:
         self,
         qs: Querystring = _qs,
         *,
-        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
-        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+        array_format: ArrayFormat | NotGiven = not_given,
+        nested_format: NestedFormat | NotGiven = not_given,
     ) -> None:
         self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
         self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
isaacus/_types.py CHANGED
@@ -13,10 +13,21 @@ from typing import (
     Mapping,
     TypeVar,
     Callable,
+    Iterator,
     Optional,
     Sequence,
 )
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+    Set,
+    Literal,
+    Protocol,
+    TypeAlias,
+    TypedDict,
+    SupportsIndex,
+    overload,
+    override,
+    runtime_checkable,
+)
 
 import httpx
 import pydantic
@@ -106,18 +117,21 @@ class RequestOptions(TypedDict, total=False):
 # Sentinel class used until PEP 0661 is accepted
 class NotGiven:
     """
-    A sentinel singleton class used to distinguish omitted keyword arguments
-    from those passed in with the value None (which may have different behavior).
+    For parameters with a meaningful None value, we need to distinguish between
+    the user explicitly passing None, and the user not passing the parameter at
+    all.
+
+    User code shouldn't need to use not_given directly.
 
     For example:
 
     ```py
-    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+    def create(timeout: Timeout | None | NotGiven = not_given): ...
 
 
-    get(timeout=1)  # 1s timeout
-    get(timeout=None)  # No timeout
-    get()  # Default timeout behavior, which may not be statically known at the method definition.
+    create(timeout=1)  # 1s timeout
+    create(timeout=None)  # No timeout
+    create()  # Default timeout behavior
     ```
     """
 
@@ -129,13 +143,14 @@ class NotGiven:
         return "NOT_GIVEN"
 
 
-NotGivenOr = Union[_T, NotGiven]
+not_given = NotGiven()
+# for backwards compatibility:
 NOT_GIVEN = NotGiven()
 
 
 class Omit:
-    """In certain situations you need to be able to represent a case where a default value has
-    to be explicitly removed and `None` is not an appropriate substitute, for example:
+    """
+    To explicitly omit something from being sent in a request, use `omit`.
 
     ```py
     # as the default `Content-Type` header is `application/json` that will be sent
@@ -145,8 +160,8 @@ class Omit:
     # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
     client.post(..., headers={"Content-Type": "multipart/form-data"})
 
-    # instead you can remove the default `application/json` header by passing Omit
-    client.post(..., headers={"Content-Type": Omit()})
+    # instead you can remove the default `application/json` header by passing omit
+    client.post(..., headers={"Content-Type": omit})
     ```
     """
 
@@ -154,6 +169,9 @@ class Omit:
         return False
 
 
+omit = Omit()
+
+
 @runtime_checkable
 class ModelBuilderProtocol(Protocol):
     @classmethod
@@ -217,3 +235,26 @@ class _GenericAlias(Protocol):
 class HttpxSendArgs(TypedDict, total=False):
     auth: httpx.Auth
     follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+    # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+    # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+    class SequenceNotStr(Protocol[_T_co]):
+        @overload
+        def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+        @overload
+        def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+        def __contains__(self, value: object, /) -> bool: ...
+        def __len__(self) -> int: ...
+        def __iter__(self) -> Iterator[_T_co]: ...
+        def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+        def count(self, value: Any, /) -> int: ...
+        def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+    # just point this to a normal `Sequence` at runtime to avoid having to special case
+    # deserializing our custom sequence type
+    SequenceNotStr = Sequence
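Note: as the `_types.py` diff shows, the module now exposes lowercase `not_given` and `omit` sentinel instances (with `NOT_GIVEN` retained for backwards compatibility) alongside the new `SequenceNotStr` protocol. A small illustrative sketch of the sentinels; the commented `client.post(...)` line mirrors the docstring example rather than a real endpoint:

```py
from isaacus._types import NOT_GIVEN, NotGiven, not_given, omit

# Both spellings are NotGiven instances and are falsy, unlike None.
assert isinstance(not_given, NotGiven) and not not_given
assert repr(NOT_GIVEN) == "NOT_GIVEN"

# Per the Omit docstring: drop the default `application/json` Content-Type
# header so httpx can generate its own multipart boundary.
# client.post(..., headers={"Content-Type": omit})
```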
isaacus/_utils/__init__.py CHANGED
@@ -10,7 +10,6 @@ from ._utils import (
     lru_cache as lru_cache,
     is_mapping as is_mapping,
     is_tuple_t as is_tuple_t,
-    parse_date as parse_date,
     is_iterable as is_iterable,
     is_sequence as is_sequence,
     coerce_float as coerce_float,
@@ -23,7 +22,6 @@ from ._utils import (
     coerce_boolean as coerce_boolean,
     coerce_integer as coerce_integer,
     file_from_path as file_from_path,
-    parse_datetime as parse_datetime,
     strip_not_given as strip_not_given,
     deepcopy_minimal as deepcopy_minimal,
     get_async_library as get_async_library,
@@ -32,12 +30,20 @@ from ._utils import (
     maybe_coerce_boolean as maybe_coerce_boolean,
     maybe_coerce_integer as maybe_coerce_integer,
 )
+from ._compat import (
+    get_args as get_args,
+    is_union as is_union,
+    get_origin as get_origin,
+    is_typeddict as is_typeddict,
+    is_literal_type as is_literal_type,
+)
 from ._typing import (
     is_list_type as is_list_type,
     is_union_type as is_union_type,
     extract_type_arg as extract_type_arg,
     is_iterable_type as is_iterable_type,
     is_required_type as is_required_type,
+    is_sequence_type as is_sequence_type,
     is_annotated_type as is_annotated_type,
     is_type_alias_type as is_type_alias_type,
     strip_annotated_type as strip_annotated_type,
@@ -55,3 +61,4 @@ from ._reflection import (
     function_has_argument as function_has_argument,
     assert_signatures_in_sync as assert_signatures_in_sync,
 )
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
isaacus/_utils/_compat.py ADDED
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+    return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+    return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+    if sys.version_info < (3, 10):
+        return tp is Union  # type: ignore[comparison-overlap]
+    else:
+        import types
+
+        return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+    return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+    return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    return _parse_datetime(value)
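Note: the new `_utils/_compat.py` module centralises small typing shims that previously came from Pydantic's own helpers. A quick illustration of how they behave, assuming the module path shown in the diff; the results follow directly from the definitions above:

```py
from typing import Literal, Union, get_origin

from isaacus._utils._compat import is_union, is_literal_type

is_union(get_origin(Union[int, str]))  # True -- origin of a Union is typing.Union
is_literal_type(Literal["a", "b"])     # True -- origin is Literal
is_literal_type(int)                   # False -- no origin, not a Literal
```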
isaacus/_utils/_datetime_parse.py ADDED
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        return timezone(timedelta(minutes=offset))
+    else:
+        return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = _get_numeric(value, "datetime")
+    if number is not None:
+        return _from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+
+    match = datetime_re.match(value)
+    if match is None:
+        raise ValueError("invalid datetime format")
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"))
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    return datetime(**kw_)  # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = _get_numeric(value, "date")
+    if number is not None:
+        return _from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    assert not isinstance(value, (float, int))
+    match = date_re.match(value)
+    if match is None:
+        raise ValueError("invalid date format")
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise ValueError("invalid date format") from None
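Note: the vendored parser above mirrors Pydantic v1's `datetime_parse` module so date/datetime coercion behaves the same on both Pydantic major versions; per the `_utils/__init__.py` diff it is also re-exported as `isaacus._utils.parse_date` / `parse_datetime`. A short sketch of its behaviour, with expected results shown as comments that follow from the regexes and offset handling above:

```py
from isaacus._utils._datetime_parse import parse_date, parse_datetime

parse_date("2024-07-01")                     # date(2024, 7, 1)
parse_datetime("2024-07-01T12:30:00Z")       # tz-aware, tzinfo=timezone.utc
parse_datetime("2024-07-01 12:30:00+05:30")  # fixed +05:30 offset
parse_datetime(0)                            # 1970-01-01 00:00:00+00:00 (unix seconds)
```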