google-genai 1.8.0.tar.gz → 1.10.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {google_genai-1.8.0/google_genai.egg-info → google_genai-1.10.0}/PKG-INFO +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_api_client.py +117 -28
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_automatic_function_calling_util.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_extra_utils.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_replay_api_client.py +32 -8
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_transformers.py +101 -61
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/batches.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/caches.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/errors.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/files.py +23 -7
- google_genai-1.10.0/google/genai/live.py +1875 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/models.py +24 -10
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/operations.py +18 -10
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/tunings.py +1 -4
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/types.py +742 -81
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/version.py +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0/google_genai.egg-info}/PKG-INFO +1 -1
- {google_genai-1.8.0 → google_genai-1.10.0}/pyproject.toml +2 -2
- google_genai-1.8.0/google/genai/live.py +0 -922
- {google_genai-1.8.0 → google_genai-1.10.0}/LICENSE +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/MANIFEST.in +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/README.md +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/__init__.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_api_module.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_common.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/_test_api_client.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/chats.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/client.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google/genai/pagers.py +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google_genai.egg-info/SOURCES.txt +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google_genai.egg-info/dependency_links.txt +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google_genai.egg-info/requires.txt +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/google_genai.egg-info/top_level.txt +0 -0
- {google_genai-1.8.0 → google_genai-1.10.0}/setup.cfg +0 -0

google/genai/_api_client.py

@@ -19,18 +19,21 @@
 The BaseApiClient is intended to be a private module and is subject to change.
 """
 
-import anyio
 import asyncio
 import copy
 from dataclasses import dataclass
 import datetime
+import http
 import io
 import json
 import logging
+import math
 import os
 import sys
+import time
 from typing import Any, AsyncIterator, Optional, Tuple, Union
 from urllib.parse import urlparse, urlunparse
+import anyio
 import google.auth
 import google.auth.credentials
 from google.auth.credentials import Credentials
@@ -95,6 +98,14 @@ def _patch_http_options(
   return copy_option
 
 
+def _populate_server_timeout_header(
+    headers: dict[str, str], timeout_in_seconds: Optional[Union[float, int]]
+) -> None:
+  """Populates the server timeout header in the headers dict."""
+  if timeout_in_seconds and 'X-Server-Timeout' not in headers:
+    headers['X-Server-Timeout'] = str(math.ceil(timeout_in_seconds))
+
+
 def _join_url_path(base_url: str, path: str) -> str:
   parsed_base = urlparse(base_url)
   base_path = (

@@ -128,6 +139,19 @@ def _refresh_auth(credentials: Credentials) -> Credentials:
   return credentials
 
 
+def _get_timeout_in_seconds(
+    timeout: Optional[Union[float, int]],
+) -> Optional[float]:
+  """Converts the timeout to seconds."""
+  if timeout:
+    # HttpOptions.timeout is in milliseconds. But httpx.Client.request()
+    # expects seconds.
+    timeout_in_seconds = timeout / 1000.0
+  else:
+    timeout_in_seconds = None
+  return timeout_in_seconds
+
+
 @dataclass
 class HttpRequest:
   headers: dict[str, str]
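
The two new helpers centralize how a request timeout is handled: HttpOptions.timeout is expressed in milliseconds, gets converted to seconds for httpx, and is also advertised to the server as a rounded-up X-Server-Timeout header. A minimal standalone sketch of that behavior (restating the helpers shown above rather than importing the private module):

import math
from typing import Optional, Union

def get_timeout_in_seconds(timeout: Optional[Union[float, int]]) -> Optional[float]:
  # HttpOptions.timeout is in milliseconds; httpx expects seconds.
  return timeout / 1000.0 if timeout else None

def populate_server_timeout_header(
    headers: dict[str, str], timeout_in_seconds: Optional[Union[float, int]]
) -> None:
  # Only set the header when a timeout exists and the caller has not set one.
  if timeout_in_seconds and 'X-Server-Timeout' not in headers:
    headers['X-Server-Timeout'] = str(math.ceil(timeout_in_seconds))

headers: dict[str, str] = {}
seconds = get_timeout_in_seconds(2500)            # 2500 ms -> 2.5 s
populate_server_timeout_header(headers, seconds)  # header rounds up to whole seconds
assert headers == {'X-Server-Timeout': '3'}
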
@@ -520,18 +544,13 @@ class BaseApiClient:
         versioned_path,
     )
 
-    timeout_in_seconds = (
-        patched_http_options.timeout
-    )
-    if timeout_in_seconds:
-      # HttpOptions.timeout is in milliseconds. But httpx.Client.request()
-      # expects seconds.
-      timeout_in_seconds = timeout_in_seconds / 1000.0
-    else:
-      timeout_in_seconds = None
+    timeout_in_seconds = _get_timeout_in_seconds(patched_http_options.timeout)
 
     if patched_http_options.headers is None:
       raise ValueError('Request headers must be set.')
+    _populate_server_timeout_header(
+        patched_http_options.headers, timeout_in_seconds
+    )
     return HttpRequest(
         method=http_method,
         url=url,
@@ -712,7 +731,12 @@ class BaseApiClient:
     return async_generator()
 
   def upload_file(
-      self, file_path: Union[str, io.IOBase], upload_url: str, upload_size: int
+      self,
+      file_path: Union[str, io.IOBase],
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file to the given URL.
 

@@ -723,18 +747,28 @@ class BaseApiClient:
       upload_url: The URL to upload the file to.
       upload_size: The size of file content to be uploaded, this will have to
         match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
           The HttpResponse object from the finalize request.
     """
     if isinstance(file_path, io.IOBase):
-      return self._upload_fd(file_path, upload_url, upload_size)
+      return self._upload_fd(
+          file_path, upload_url, upload_size, http_options=http_options
+      )
     else:
       with open(file_path, 'rb') as file:
-        return self._upload_fd(file, upload_url, upload_size)
+        return self._upload_fd(
+            file, upload_url, upload_size, http_options=http_options
+        )
 
   def _upload_fd(
-      self, file: io.IOBase, upload_url: str, upload_size: int
+      self,
+      file: io.IOBase,
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file to the given URL.
 

@@ -743,6 +777,7 @@ class BaseApiClient:
       upload_url: The URL to upload the file to.
       upload_size: The size of file content to be uploaded, this will have to
         match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
           The HttpResponse object from the finalize request.

@@ -758,15 +793,32 @@ class BaseApiClient:
       # If last chunk, finalize the upload.
       if chunk_size + offset >= upload_size:
         upload_command += ', finalize'
+      http_options = http_options if http_options else self._http_options
+      timeout = (
+          http_options.get('timeout')
+          if isinstance(http_options, dict)
+          else http_options.timeout
+      )
+      if timeout is None:
+        # Per request timeout is not configured. Check the global timeout.
+        timeout = (
+            self._http_options.timeout
+            if isinstance(self._http_options, dict)
+            else self._http_options.timeout
+        )
+      timeout_in_seconds = _get_timeout_in_seconds(timeout)
+      upload_headers = {
+          'X-Goog-Upload-Command': upload_command,
+          'X-Goog-Upload-Offset': str(offset),
+          'Content-Length': str(chunk_size),
+      }
+      _populate_server_timeout_header(upload_headers, timeout_in_seconds)
       response = self._httpx_client.request(
           method='POST',
           url=upload_url,
-          headers={
-              'X-Goog-Upload-Command': upload_command,
-              'X-Goog-Upload-Offset': str(offset),
-              'Content-Length': str(chunk_size),
-          },
+          headers=upload_headers,
           content=file_chunk,
+          timeout=timeout_in_seconds,
       )
       offset += chunk_size
       if response.headers['x-goog-upload-status'] != 'active':
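
Each chunked upload request now resolves an effective timeout: the per-request http_options (a dict or an HttpOptions object) wins when it carries a timeout, otherwise the client-level options are consulted; the value is then converted to seconds, passed to httpx, and advertised via X-Server-Timeout. A rough standalone sketch of that fallback (the function name and duck-typed arguments are illustrative, not the library API):

from typing import Any, Optional, Union

def resolve_timeout_ms(
    http_options: Any,
    client_http_options: Any,
) -> Optional[Union[float, int]]:
  """Picks the per-request timeout if set, else the client-level one (milliseconds)."""
  options = http_options if http_options else client_http_options
  timeout = options.get('timeout') if isinstance(options, dict) else options.timeout
  if timeout is None:
    # Per-request timeout is not configured; fall back to the client-level options.
    timeout = (
        client_http_options.get('timeout')
        if isinstance(client_http_options, dict)
        else client_http_options.timeout
    )
  return timeout
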
@@ -783,7 +835,12 @@ class BaseApiClient:
       )
     return HttpResponse(response.headers, response_stream=[response.text])
 
-  def download_file(self, path: str, http_options=None):
+  def download_file(
+      self,
+      path: str,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ):
     """Downloads the file data.
 
     Args:

@@ -822,6 +879,8 @@ class BaseApiClient:
       file_path: Union[str, io.IOBase],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file asynchronously to the given URL.
 

@@ -831,23 +890,30 @@ class BaseApiClient:
       upload_url: The URL to upload the file to.
       upload_size: The size of file content to be uploaded, this will have to
         match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
           The HttpResponse object from the finalize request.
     """
     if isinstance(file_path, io.IOBase):
-      return await self._async_upload_fd(file_path, upload_url, upload_size)
+      return await self._async_upload_fd(
+          file_path, upload_url, upload_size, http_options=http_options
+      )
     else:
       file = anyio.Path(file_path)
       fd = await file.open('rb')
       async with fd:
-        return await self._async_upload_fd(fd, upload_url, upload_size)
+        return await self._async_upload_fd(
+            fd, upload_url, upload_size, http_options=http_options
+        )
 
   async def _async_upload_fd(
       self,
       file: Union[io.IOBase, anyio.AsyncFile],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file asynchronously to the given URL.
 

@@ -856,6 +922,7 @@ class BaseApiClient:
       upload_url: The URL to upload the file to.
       upload_size: The size of file content to be uploaded, this will have to
         match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
           The HttpResponse object from the finalized request.

@@ -874,15 +941,32 @@ class BaseApiClient:
       # If last chunk, finalize the upload.
       if chunk_size + offset >= upload_size:
         upload_command += ', finalize'
+      http_options = http_options if http_options else self._http_options
+      timeout = (
+          http_options.get('timeout')
+          if isinstance(http_options, dict)
+          else http_options.timeout
+      )
+      if timeout is None:
+        # Per request timeout is not configured. Check the global timeout.
+        timeout = (
+            self._http_options.timeout
+            if isinstance(self._http_options, dict)
+            else self._http_options.timeout
+        )
+      timeout_in_seconds = _get_timeout_in_seconds(timeout)
+      upload_headers = {
+          'X-Goog-Upload-Command': upload_command,
+          'X-Goog-Upload-Offset': str(offset),
+          'Content-Length': str(chunk_size),
+      }
+      _populate_server_timeout_header(upload_headers, timeout_in_seconds)
       response = await self._async_httpx_client.request(
           method='POST',
           url=upload_url,
           content=file_chunk,
-          headers={
-              'X-Goog-Upload-Command': upload_command,
-              'X-Goog-Upload-Offset': str(offset),
-              'Content-Length': str(chunk_size),
-          },
+          headers=upload_headers,
+          timeout=timeout_in_seconds,
       )
       offset += chunk_size
       if response.headers.get('x-goog-upload-status') != 'active':

@@ -899,7 +983,12 @@ class BaseApiClient:
       )
     return HttpResponse(response.headers, response_stream=[response.text])
 
-  async def async_download_file(self, path: str, http_options=None):
+  async def async_download_file(
+      self,
+      path: str,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ):
     """Downloads the file data.
 
     Args:

google/genai/_automatic_function_calling_util.py

@@ -28,7 +28,7 @@ from . import types
 if sys.version_info >= (3, 10):
   VersionedUnionType = builtin_types.UnionType
 else:
-  VersionedUnionType = typing._UnionGenericAlias
+  VersionedUnionType = typing._UnionGenericAlias  # type: ignore[attr-defined]
 
 _py_builtin_type_to_schema_type = {
     str: types.Type.STRING,

google/genai/_replay_api_client.py

@@ -34,6 +34,8 @@ from ._api_client import HttpOptions
 from ._api_client import HttpRequest
 from ._api_client import HttpResponse
 from ._common import BaseModel
+from .types import HttpOptionsOrDict
+from .types import GenerateVideosOperation
 
 
 def _redact_version_numbers(version_string: str) -> str:

@@ -396,7 +398,12 @@ class ReplayApiClient(BaseApiClient):
     if isinstance(response_model, list):
       response_model = response_model[0]
     print('response_model: ', response_model.model_dump(exclude_none=True))
-    actual = response_model.model_dump(exclude_none=True, mode='json')
+    if isinstance(response_model, GenerateVideosOperation):
+      actual = response_model.model_dump(
+          exclude={'result'}, exclude_none=True, mode='json'
+      )
+    else:
+      actual = response_model.model_dump(exclude_none=True, mode='json')
     expected = interaction.response.sdk_response_segments[
         self._sdk_response_index
     ]
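
In replay mode the freshly built SDK response is compared against the recorded one via pydantic's model_dump; for GenerateVideosOperation the result field is excluded so the volatile operation payload does not break the comparison. A small illustration of the pydantic call involved (the model below is a stand-in, not the real GenerateVideosOperation):

from typing import Optional
import pydantic

class Operation(pydantic.BaseModel):
  name: Optional[str] = None
  done: Optional[bool] = None
  result: Optional[dict] = None

op = Operation(name='operations/123', done=True, result={'uri': 'gs://bucket/video'})
# exclude={'result'} drops that field; exclude_none drops unset fields.
print(op.model_dump(exclude={'result'}, exclude_none=True, mode='json'))
# {'name': 'operations/123', 'done': True}
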
@@ -461,7 +468,14 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(http_request)
 
-  def upload_file(self, file_path, upload_url, upload_size) -> HttpResponse:
+  def upload_file(
+      self,
+      file_path: Union[str, io.IOBase],
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ) -> HttpResponse:
     if isinstance(file_path, io.IOBase):
       offset = file_path.tell()
       content = file_path.read()

@@ -479,7 +493,9 @@ class ReplayApiClient(BaseApiClient):
     if self._should_call_api():
       result: Union[str, HttpResponse]
       try:
-        result = super().upload_file(file_path, upload_url, upload_size)
+        result = super().upload_file(
+            file_path, upload_url, upload_size, http_options=http_options
+        )
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]

@@ -496,6 +512,8 @@ class ReplayApiClient(BaseApiClient):
       file_path: Union[str, io.IOBase],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     if isinstance(file_path, io.IOBase):
       offset = file_path.tell()

@@ -515,7 +533,7 @@ class ReplayApiClient(BaseApiClient):
       result: HttpResponse
       try:
         result = await super().async_upload_file(
-            file_path, upload_url, upload_size
+            file_path, upload_url, upload_size, http_options=http_options
         )
       except HTTPError as e:
         result = HttpResponse(

@@ -528,14 +546,16 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(request)
 
-  def download_file(self, path: str, http_options=None):
+  def download_file(
+      self, path: str, *, http_options: Optional[HttpOptionsOrDict] = None
+  ):
     self._initialize_replay_session_if_not_loaded()
     request = self._build_request(
         'get', path=path, request_dict={}, http_options=http_options
     )
     if self._should_call_api():
       try:
-        result = super().download_file(path, http_options)
+        result = super().download_file(path, http_options=http_options)
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]

@@ -547,14 +567,18 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(request).byte_stream[0]
 
-  async def async_download_file(self, path: str, http_options=None):
+  async def async_download_file(
+      self, path: str, *, http_options: Optional[HttpOptionsOrDict] = None
+  ):
     self._initialize_replay_session_if_not_loaded()
     request = self._build_request(
         'get', path=path, request_dict={}, http_options=http_options
     )
     if self._should_call_api():
       try:
-        result = await super().async_download_file(path, http_options)
+        result = await super().async_download_file(
+            path, http_options=http_options
+        )
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]

google/genai/_transformers.py

@@ -1,4 +1,4 @@
-# Copyright
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -26,7 +26,7 @@ import sys
 import time
 import types as builtin_types
 import typing
-from typing import Any, GenericAlias, Optional, Union  # type: ignore[attr-defined]
+from typing import Any, GenericAlias, Optional, Sequence, Union  # type: ignore[attr-defined]
 
 if typing.TYPE_CHECKING:
   import PIL.Image

@@ -43,7 +43,7 @@ if sys.version_info >= (3, 10):
   _UNION_TYPES = (typing.Union, builtin_types.UnionType)
   from typing import TypeGuard
 else:
-  VersionedUnionType = typing._UnionGenericAlias
+  VersionedUnionType = typing._UnionGenericAlias  # type: ignore[attr-defined]
   _UNION_TYPES = (typing.Union,)
   from typing_extensions import TypeGuard
 
@@ -251,6 +251,62 @@ def pil_to_blob(img) -> types.Blob:
   return types.Blob(mime_type=mime_type, data=data)
 
 
+def t_function_response(
+    function_response: types.FunctionResponseOrDict,
+) -> types.FunctionResponse:
+  if not function_response:
+    raise ValueError('function_response is required.')
+  if isinstance(function_response, dict):
+    return types.FunctionResponse.model_validate(function_response)
+  elif isinstance(function_response, types.FunctionResponse):
+    return function_response
+  else:
+    raise TypeError(
+        f'Could not parse input as FunctionResponse. Unsupported'
+        f' function_response type: {type(function_response)}'
+    )
+
+def t_function_responses(
+    function_responses: Union[
+        types.FunctionResponseOrDict,
+        Sequence[types.FunctionResponseOrDict],
+    ],
+) -> list[types.FunctionResponse]:
+  if not function_responses:
+    raise ValueError('function_responses are required.')
+  if isinstance(function_responses, Sequence):
+    return [t_function_response(response) for response in function_responses]
+  else:
+    return [t_function_response(function_responses)]
+
+
+BlobUnion = Union[types.Blob, types.BlobDict, 'PIL.Image.Image']
+
+def t_blob(blob: BlobUnion) -> types.Blob:
+  try:
+    import PIL.Image
+
+    PIL_Image = PIL.Image.Image
+  except ImportError:
+    PIL_Image = None
+
+  if not blob:
+    raise ValueError('blob is required.')
+
+  if isinstance(blob, types.Blob):
+    return blob
+
+  if isinstance(blob, dict):
+    return types.Blob.model_validate(blob)
+
+  if PIL_Image is not None and isinstance(blob, PIL_Image):
+    return pil_to_blob(blob)
+
+  raise TypeError(
+      f'Could not parse input as Blob. Unsupported blob type: {type(blob)}'
+  )
+
+
 def t_part(part: Optional[types.PartUnionDict]) -> types.Part:
   try:
     import PIL.Image
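
The new t_function_response / t_function_responses and t_blob helpers normalize user-supplied input (typed objects, plain dicts, or PIL images) into types.FunctionResponse and types.Blob values. A hedged usage sketch against the private _transformers module (private and subject to change; the field values below are made up):

from google.genai import _transformers as t
from google.genai import types

# A single dict is validated and wrapped into a one-element list.
responses = t.t_function_responses(
    {'name': 'get_weather', 'response': {'temperature_c': 21}}
)
assert isinstance(responses[0], types.FunctionResponse)

# A dict blob is validated into types.Blob; a PIL image would go through pil_to_blob.
blob = t.t_blob({'mime_type': 'image/png', 'data': b'\x89PNG'})
assert blob.mime_type == 'image/png'
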
@@ -601,51 +657,55 @@ def process_schema(
     )
 
   if schema.get('title') == 'PlaceholderLiteralEnum':
-    schema
-
-  #
-  #
-
-
+    del schema['title']
+
+  # Standardize spelling for relevant schema fields. For example, if a dict is
+  # provided directly to response_schema, it may use `any_of` instead of `anyOf.
+  # Otherwise, model_json_schema() uses `anyOf`.
+  for from_name, to_name in [
+      ('any_of', 'anyOf'),
+      ('property_ordering', 'propertyOrdering'),
+  ]:
+    if (value := schema.pop(from_name, None)) is not None:
+      schema[to_name] = value
 
   if defs is None:
     defs = schema.pop('$defs', {})
     for _, sub_schema in defs.items():
-      process_schema(sub_schema, client, defs)
+      # We can skip the '$ref' check, because JSON schema forbids a '$ref' from
+      # directly referencing another '$ref':
+      # https://json-schema.org/understanding-json-schema/structuring#recursion
+      process_schema(
+          sub_schema, client, defs, order_properties=order_properties
+      )
 
   handle_null_fields(schema)
 
   # After removing null fields, Optional fields with only one possible type
   # will have a $ref key that needs to be flattened
   # For example: {'default': None, 'description': 'Name of the person', 'nullable': True, '$ref': '#/$defs/TestPerson'}
-
-
-
-
-
-
-
-
-
-
-
-
-        if ref_key is None:
-          process_schema(sub_schema, client, defs)
-        else:
-          ref = defs[ref_key.split('defs/')[-1]]
-          any_of.append(ref)
-      schema['anyOf'] = [item for item in any_of if '$ref' not in item]
+  if (ref := schema.pop('$ref', None)) is not None:
+    schema.update(defs[ref.split('defs/')[-1]])
+
+  def _recurse(sub_schema: dict[str, Any]) -> dict[str, Any]:
+    """Returns the processed `sub_schema`, resolving its '$ref' if any."""
+    if (ref := sub_schema.pop('$ref', None)) is not None:
+      sub_schema = defs[ref.split('defs/')[-1]]
+    process_schema(sub_schema, client, defs, order_properties=order_properties)
+    return sub_schema
+
+  if (any_of := schema.get('anyOf')) is not None:
+    schema['anyOf'] = [_recurse(sub_schema) for sub_schema in any_of]
     return
 
-  schema_type = schema.get('type', None)
+  schema_type = schema.get('type')
   if isinstance(schema_type, Enum):
     schema_type = schema_type.value
   schema_type = schema_type.upper()
 
   # model_json_schema() returns a schema with a 'const' field when a Literal with one value is provided as a pydantic field
   # For example `genre: Literal['action']` becomes: {'const': 'action', 'title': 'Genre', 'type': 'string'}
-  const = schema.get('const', None)
+  const = schema.get('const')
   if const is not None:
     if schema_type == 'STRING':
       schema['enum'] = [const]
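
The rewritten block above first renames snake_case keys that a caller may use in a raw dict schema (any_of, property_ordering) to the camelCase spellings, and then splices the matching $defs entry over any $ref it encounters. An isolated illustration of those two dict manipulations (the sample schema and defs are made up):

defs = {'Person': {'type': 'OBJECT', 'properties': {'name': {'type': 'STRING'}}}}
schema = {'any_of': [{'$ref': '#/$defs/Person'}, {'type': 'STRING'}]}

# 1. Normalize spelling, as in the renaming loop above.
for from_name, to_name in [('any_of', 'anyOf'), ('property_ordering', 'propertyOrdering')]:
  if (value := schema.pop(from_name, None)) is not None:
    schema[to_name] = value

# 2. Resolve each '$ref' against the collected $defs, as _recurse does.
for i, sub_schema in enumerate(schema['anyOf']):
  if (ref := sub_schema.pop('$ref', None)) is not None:
    schema['anyOf'][i] = defs[ref.split('defs/')[-1]]

print(schema)
# {'anyOf': [{'type': 'OBJECT', 'properties': {'name': {'type': 'STRING'}}}, {'type': 'STRING'}]}
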
@@ -654,38 +714,18 @@ def process_schema(
       raise ValueError('Literal values must be strings.')
 
   if schema_type == 'OBJECT':
-    properties = schema.get('properties', None)
-
-
-
-
-
-
-
-
-          process_schema(ref, client, defs)
-          properties[name] = ref
-    if (
-        len(properties.items()) > 1
-        and order_properties
-        and all(
-            ordering_key not in schema
-            for ordering_key in ['property_ordering', 'propertyOrdering']
-        )
-    ):
-      property_names = list(properties.keys())
-      schema['property_ordering'] = property_names
+    if (properties := schema.get('properties')) is not None:
+      for name, sub_schema in list(properties.items()):
+        properties[name] = _recurse(sub_schema)
+      if (
+          len(properties.items()) > 1
+          and order_properties
+          and 'propertyOrdering' not in schema
+      ):
+        schema['property_ordering'] = list(properties.keys())
   elif schema_type == 'ARRAY':
-
-
-      return
-    ref_key = sub_schema.get('$ref', None)
-    if ref_key is None:
-      process_schema(sub_schema, client, defs)
-    else:
-      ref = defs[ref_key.split('defs/')[-1]]
-      process_schema(ref, client, defs)
-      schema['items'] = ref
+    if (items := schema.get('items')) is not None:
+      schema['items'] = _recurse(items)
 
 
 def _process_enum(
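
In the OBJECT branch above, when a schema has more than one property, order_properties is enabled, and the caller did not already supply a propertyOrdering, the property names are recorded in declaration order. A dict-level illustration of the resulting field:

schema = {
    'type': 'OBJECT',
    'properties': {'name': {'type': 'STRING'}, 'age': {'type': 'INTEGER'}},
}
properties = schema['properties']
if len(properties) > 1 and 'propertyOrdering' not in schema:
  schema['property_ordering'] = list(properties.keys())
# schema['property_ordering'] == ['name', 'age']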

google/genai/errors.py

@@ -36,7 +36,7 @@ class APIError(Exception):
       self,
       code: int,
       response_json: Any,
-      response: Union['ReplayResponse', httpx.Response],
+      response: Optional[Union['ReplayResponse', httpx.Response]] = None,
   ):
     self.response = response
     self.details = response_json