label-studio-sdk 0.0.34__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of label-studio-sdk might be problematic.
- label_studio_sdk/__init__.py +232 -9
- label_studio_sdk/_extensions/label_studio_tools/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/label_config.py +163 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/exceptions.py +2 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/io.py +228 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/params.py +45 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/__init__.py +1 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/beam.py +34 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/example.py +17 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/registry.py +67 -0
- label_studio_sdk/_extensions/label_studio_tools/postprocessing/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/postprocessing/video.py +97 -0
- label_studio_sdk/_extensions/pager_ext.py +49 -0
- label_studio_sdk/_legacy/__init__.py +11 -0
- label_studio_sdk/_legacy/client.py +471 -0
- label_studio_sdk/_legacy/objects.py +74 -0
- label_studio_sdk/{project.py → _legacy/project.py} +2 -2
- label_studio_sdk/{schema → _legacy/schema}/label_config_schema.json +14 -14
- label_studio_sdk/actions/__init__.py +27 -0
- label_studio_sdk/actions/client.py +223 -0
- label_studio_sdk/actions/types/__init__.py +25 -0
- label_studio_sdk/actions/types/actions_create_request_filters.py +43 -0
- label_studio_sdk/actions/types/actions_create_request_filters_conjunction.py +5 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +49 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item_filter.py +31 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item_operator.py +23 -0
- label_studio_sdk/actions/types/actions_create_request_id.py +19 -0
- label_studio_sdk/actions/types/actions_create_request_ordering_item.py +31 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items.py +10 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +39 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +39 -0
- label_studio_sdk/annotations/__init__.py +2 -0
- label_studio_sdk/annotations/client.py +750 -0
- label_studio_sdk/base_client.py +183 -0
- label_studio_sdk/client.py +17 -463
- label_studio_sdk/converter/__init__.py +7 -0
- label_studio_sdk/converter/audio.py +56 -0
- label_studio_sdk/converter/brush.py +452 -0
- label_studio_sdk/converter/converter.py +1175 -0
- label_studio_sdk/converter/exports/__init__.py +0 -0
- label_studio_sdk/converter/exports/csv.py +82 -0
- label_studio_sdk/converter/exports/csv2.py +103 -0
- label_studio_sdk/converter/funsd.py +85 -0
- label_studio_sdk/converter/imports/__init__.py +0 -0
- label_studio_sdk/converter/imports/coco.py +314 -0
- label_studio_sdk/converter/imports/colors.py +198 -0
- label_studio_sdk/converter/imports/label_config.py +45 -0
- label_studio_sdk/converter/imports/pathtrack.py +269 -0
- label_studio_sdk/converter/imports/yolo.py +236 -0
- label_studio_sdk/converter/main.py +202 -0
- label_studio_sdk/converter/utils.py +473 -0
- label_studio_sdk/core/__init__.py +33 -0
- label_studio_sdk/core/api_error.py +15 -0
- label_studio_sdk/core/client_wrapper.py +55 -0
- label_studio_sdk/core/datetime_utils.py +28 -0
- label_studio_sdk/core/file.py +38 -0
- label_studio_sdk/core/http_client.py +447 -0
- label_studio_sdk/core/jsonable_encoder.py +99 -0
- label_studio_sdk/core/pagination.py +87 -0
- label_studio_sdk/core/pydantic_utilities.py +28 -0
- label_studio_sdk/core/query_encoder.py +33 -0
- label_studio_sdk/core/remove_none_from_dict.py +11 -0
- label_studio_sdk/core/request_options.py +32 -0
- label_studio_sdk/environment.py +7 -0
- label_studio_sdk/errors/__init__.py +6 -0
- label_studio_sdk/errors/bad_request_error.py +8 -0
- label_studio_sdk/errors/internal_server_error.py +8 -0
- label_studio_sdk/export_storage/__init__.py +28 -0
- label_studio_sdk/export_storage/azure/__init__.py +5 -0
- label_studio_sdk/export_storage/azure/client.py +888 -0
- label_studio_sdk/export_storage/azure/types/__init__.py +6 -0
- label_studio_sdk/export_storage/azure/types/azure_create_response.py +67 -0
- label_studio_sdk/export_storage/azure/types/azure_update_response.py +67 -0
- label_studio_sdk/export_storage/client.py +107 -0
- label_studio_sdk/export_storage/gcs/__init__.py +5 -0
- label_studio_sdk/export_storage/gcs/client.py +888 -0
- label_studio_sdk/export_storage/gcs/types/__init__.py +6 -0
- label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +67 -0
- label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +67 -0
- label_studio_sdk/export_storage/local/__init__.py +5 -0
- label_studio_sdk/export_storage/local/client.py +834 -0
- label_studio_sdk/export_storage/local/types/__init__.py +6 -0
- label_studio_sdk/export_storage/local/types/local_create_response.py +57 -0
- label_studio_sdk/export_storage/local/types/local_update_response.py +57 -0
- label_studio_sdk/export_storage/redis/__init__.py +5 -0
- label_studio_sdk/export_storage/redis/client.py +918 -0
- label_studio_sdk/export_storage/redis/types/__init__.py +6 -0
- label_studio_sdk/export_storage/redis/types/redis_create_response.py +72 -0
- label_studio_sdk/export_storage/redis/types/redis_update_response.py +72 -0
- label_studio_sdk/export_storage/s3/__init__.py +5 -0
- label_studio_sdk/export_storage/s3/client.py +1008 -0
- label_studio_sdk/export_storage/s3/types/__init__.py +6 -0
- label_studio_sdk/export_storage/s3/types/s3create_response.py +89 -0
- label_studio_sdk/export_storage/s3/types/s3update_response.py +89 -0
- label_studio_sdk/export_storage/types/__init__.py +5 -0
- label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +30 -0
- label_studio_sdk/files/__init__.py +2 -0
- label_studio_sdk/files/client.py +556 -0
- label_studio_sdk/import_storage/__init__.py +28 -0
- label_studio_sdk/import_storage/azure/__init__.py +5 -0
- label_studio_sdk/import_storage/azure/client.py +988 -0
- label_studio_sdk/import_storage/azure/types/__init__.py +6 -0
- label_studio_sdk/import_storage/azure/types/azure_create_response.py +82 -0
- label_studio_sdk/import_storage/azure/types/azure_update_response.py +82 -0
- label_studio_sdk/import_storage/client.py +107 -0
- label_studio_sdk/import_storage/gcs/__init__.py +5 -0
- label_studio_sdk/import_storage/gcs/client.py +988 -0
- label_studio_sdk/import_storage/gcs/types/__init__.py +6 -0
- label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +82 -0
- label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +82 -0
- label_studio_sdk/import_storage/local/__init__.py +5 -0
- label_studio_sdk/import_storage/local/client.py +836 -0
- label_studio_sdk/import_storage/local/types/__init__.py +6 -0
- label_studio_sdk/import_storage/local/types/local_create_response.py +57 -0
- label_studio_sdk/import_storage/local/types/local_update_response.py +57 -0
- label_studio_sdk/import_storage/redis/__init__.py +5 -0
- label_studio_sdk/import_storage/redis/client.py +924 -0
- label_studio_sdk/import_storage/redis/types/__init__.py +6 -0
- label_studio_sdk/import_storage/redis/types/redis_create_response.py +72 -0
- label_studio_sdk/import_storage/redis/types/redis_update_response.py +72 -0
- label_studio_sdk/import_storage/s3/__init__.py +5 -0
- label_studio_sdk/import_storage/s3/client.py +1138 -0
- label_studio_sdk/import_storage/s3/types/__init__.py +6 -0
- label_studio_sdk/import_storage/s3/types/s3create_response.py +109 -0
- label_studio_sdk/import_storage/s3/types/s3update_response.py +109 -0
- label_studio_sdk/import_storage/types/__init__.py +5 -0
- label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +30 -0
- label_studio_sdk/label_interface/base.py +10 -0
- label_studio_sdk/label_interface/control_tags.py +109 -71
- label_studio_sdk/label_interface/data_examples.json +96 -0
- label_studio_sdk/label_interface/interface.py +102 -53
- label_studio_sdk/label_interface/object_tags.py +8 -13
- label_studio_sdk/label_interface/region.py +33 -1
- label_studio_sdk/ml/__init__.py +19 -0
- label_studio_sdk/ml/client.py +981 -0
- label_studio_sdk/ml/types/__init__.py +17 -0
- label_studio_sdk/ml/types/ml_create_request_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_create_response.py +78 -0
- label_studio_sdk/ml/types/ml_create_response_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_update_request_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_update_response.py +78 -0
- label_studio_sdk/ml/types/ml_update_response_auth_method.py +5 -0
- label_studio_sdk/predictions/__init__.py +2 -0
- label_studio_sdk/predictions/client.py +638 -0
- label_studio_sdk/projects/__init__.py +6 -0
- label_studio_sdk/projects/client.py +1055 -0
- label_studio_sdk/projects/client_ext.py +19 -0
- label_studio_sdk/projects/exports/__init__.py +2 -0
- label_studio_sdk/projects/exports/client.py +930 -0
- label_studio_sdk/projects/types/__init__.py +7 -0
- label_studio_sdk/projects/types/projects_create_response.py +96 -0
- label_studio_sdk/projects/types/projects_import_tasks_response.py +71 -0
- label_studio_sdk/projects/types/projects_list_response.py +33 -0
- label_studio_sdk/py.typed +0 -0
- label_studio_sdk/tasks/__init__.py +5 -0
- label_studio_sdk/tasks/client.py +835 -0
- label_studio_sdk/tasks/client_ext.py +18 -0
- label_studio_sdk/tasks/types/__init__.py +6 -0
- label_studio_sdk/tasks/types/tasks_list_request_fields.py +5 -0
- label_studio_sdk/tasks/types/tasks_list_response.py +48 -0
- label_studio_sdk/types/__init__.py +115 -0
- label_studio_sdk/types/annotation.py +116 -0
- label_studio_sdk/types/annotation_filter_options.py +42 -0
- label_studio_sdk/types/annotation_last_action.py +19 -0
- label_studio_sdk/types/azure_blob_export_storage.py +112 -0
- label_studio_sdk/types/azure_blob_export_storage_status.py +7 -0
- label_studio_sdk/types/azure_blob_import_storage.py +113 -0
- label_studio_sdk/types/azure_blob_import_storage_status.py +7 -0
- label_studio_sdk/types/base_task.py +113 -0
- label_studio_sdk/types/base_user.py +42 -0
- label_studio_sdk/types/converted_format.py +36 -0
- label_studio_sdk/types/converted_format_status.py +5 -0
- label_studio_sdk/types/export.py +48 -0
- label_studio_sdk/types/export_convert.py +32 -0
- label_studio_sdk/types/export_create.py +54 -0
- label_studio_sdk/types/export_create_status.py +5 -0
- label_studio_sdk/types/export_status.py +5 -0
- label_studio_sdk/types/file_upload.py +30 -0
- label_studio_sdk/types/filter.py +53 -0
- label_studio_sdk/types/filter_group.py +35 -0
- label_studio_sdk/types/gcs_export_storage.py +112 -0
- label_studio_sdk/types/gcs_export_storage_status.py +7 -0
- label_studio_sdk/types/gcs_import_storage.py +113 -0
- label_studio_sdk/types/gcs_import_storage_status.py +7 -0
- label_studio_sdk/types/local_files_export_storage.py +97 -0
- label_studio_sdk/types/local_files_export_storage_status.py +7 -0
- label_studio_sdk/types/local_files_import_storage.py +92 -0
- label_studio_sdk/types/local_files_import_storage_status.py +7 -0
- label_studio_sdk/types/ml_backend.py +89 -0
- label_studio_sdk/types/ml_backend_auth_method.py +5 -0
- label_studio_sdk/types/ml_backend_state.py +5 -0
- label_studio_sdk/types/prediction.py +78 -0
- label_studio_sdk/types/project.py +198 -0
- label_studio_sdk/types/project_import.py +63 -0
- label_studio_sdk/types/project_import_status.py +5 -0
- label_studio_sdk/types/project_label_config.py +32 -0
- label_studio_sdk/types/project_sampling.py +7 -0
- label_studio_sdk/types/project_skip_queue.py +5 -0
- label_studio_sdk/types/redis_export_storage.py +117 -0
- label_studio_sdk/types/redis_export_storage_status.py +7 -0
- label_studio_sdk/types/redis_import_storage.py +112 -0
- label_studio_sdk/types/redis_import_storage_status.py +7 -0
- label_studio_sdk/types/s3export_storage.py +134 -0
- label_studio_sdk/types/s3export_storage_status.py +7 -0
- label_studio_sdk/types/s3import_storage.py +140 -0
- label_studio_sdk/types/s3import_storage_status.py +7 -0
- label_studio_sdk/types/serialization_option.py +36 -0
- label_studio_sdk/types/serialization_options.py +45 -0
- label_studio_sdk/types/task.py +157 -0
- label_studio_sdk/types/task_filter_options.py +49 -0
- label_studio_sdk/types/user_simple.py +37 -0
- label_studio_sdk/types/view.py +55 -0
- label_studio_sdk/types/webhook.py +67 -0
- label_studio_sdk/types/webhook_actions_item.py +21 -0
- label_studio_sdk/types/webhook_serializer_for_update.py +67 -0
- label_studio_sdk/types/webhook_serializer_for_update_actions_item.py +21 -0
- label_studio_sdk/users/__init__.py +5 -0
- label_studio_sdk/users/client.py +830 -0
- label_studio_sdk/users/types/__init__.py +6 -0
- label_studio_sdk/users/types/users_get_token_response.py +36 -0
- label_studio_sdk/users/types/users_reset_token_response.py +36 -0
- label_studio_sdk/version.py +4 -0
- label_studio_sdk/views/__init__.py +35 -0
- label_studio_sdk/views/client.py +564 -0
- label_studio_sdk/views/types/__init__.py +33 -0
- label_studio_sdk/views/types/views_create_request_data.py +43 -0
- label_studio_sdk/views/types/views_create_request_data_filters.py +43 -0
- label_studio_sdk/views/types/views_create_request_data_filters_conjunction.py +5 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +49 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item_filter.py +31 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item_operator.py +23 -0
- label_studio_sdk/views/types/views_create_request_data_ordering_item.py +31 -0
- label_studio_sdk/views/types/views_update_request_data.py +43 -0
- label_studio_sdk/views/types/views_update_request_data_filters.py +43 -0
- label_studio_sdk/views/types/views_update_request_data_filters_conjunction.py +5 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +49 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item_filter.py +31 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item_operator.py +23 -0
- label_studio_sdk/views/types/views_update_request_data_ordering_item.py +31 -0
- label_studio_sdk/webhooks/__init__.py +5 -0
- label_studio_sdk/webhooks/client.py +636 -0
- label_studio_sdk/webhooks/types/__init__.py +5 -0
- label_studio_sdk/webhooks/types/webhooks_update_request_actions_item.py +21 -0
- label_studio_sdk-1.0.1.dist-info/METADATA +163 -0
- label_studio_sdk-1.0.1.dist-info/RECORD +256 -0
- {label_studio_sdk-0.0.34.dist-info → label_studio_sdk-1.0.1.dist-info}/WHEEL +1 -2
- label_studio_sdk/objects.py +0 -35
- label_studio_sdk-0.0.34.dist-info/LICENSE +0 -201
- label_studio_sdk-0.0.34.dist-info/METADATA +0 -24
- label_studio_sdk-0.0.34.dist-info/RECORD +0 -37
- label_studio_sdk-0.0.34.dist-info/top_level.txt +0 -2
- tests/test_client.py +0 -37
- tests/test_export.py +0 -105
- tests/test_interface/__init__.py +0 -1
- tests/test_interface/configs.py +0 -137
- tests/test_interface/mockups.py +0 -22
- tests/test_interface/test_compat.py +0 -64
- tests/test_interface/test_control_tags.py +0 -55
- tests/test_interface/test_data_generation.py +0 -45
- tests/test_interface/test_lpi.py +0 -15
- tests/test_interface/test_main.py +0 -196
- tests/test_interface/test_object_tags.py +0 -36
- tests/test_interface/test_region.py +0 -36
- tests/test_interface/test_validate_summary.py +0 -35
- tests/test_interface/test_validation.py +0 -59
- {tests → label_studio_sdk/_extensions}/__init__.py +0 -0
- /label_studio_sdk/{exceptions.py → _legacy/exceptions.py} +0 -0
- /label_studio_sdk/{users.py → _legacy/users.py} +0 -0
- /label_studio_sdk/{utils.py → _legacy/utils.py} +0 -0
- /label_studio_sdk/{workspaces.py → _legacy/workspaces.py} +0 -0
label_studio_sdk/core/http_client.py
@@ -0,0 +1,447 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import asyncio
+import email.utils
+import re
+import time
+import typing
+import urllib.parse
+from contextlib import asynccontextmanager, contextmanager
+from random import random
+
+import httpx
+
+from .file import File, convert_file_dict_to_httpx_tuples
+from .jsonable_encoder import jsonable_encoder
+from .query_encoder import encode_query
+from .remove_none_from_dict import remove_none_from_dict
+from .request_options import RequestOptions
+
+INITIAL_RETRY_DELAY_SECONDS = 0.5
+MAX_RETRY_DELAY_SECONDS = 10
+MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30
+
+
+def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
+    """
+    This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait.
+
+    Inspired by the urllib3 retry implementation.
+    """
+    retry_after_ms = response_headers.get("retry-after-ms")
+    if retry_after_ms is not None:
+        try:
+            return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0
+        except Exception:
+            pass
+
+    retry_after = response_headers.get("retry-after")
+    if retry_after is None:
+        return None
+
+    # Attempt to parse the header as an int.
+    if re.match(r"^\s*[0-9]+\s*$", retry_after):
+        seconds = float(retry_after)
+    # Fallback to parsing it as a date.
+    else:
+        retry_date_tuple = email.utils.parsedate_tz(retry_after)
+        if retry_date_tuple is None:
+            return None
+        if retry_date_tuple[9] is None:  # Python 2
+            # Assume UTC if no timezone was specified
+            # On Python2.7, parsedate_tz returns None for a timezone offset
+            # instead of 0 if no timezone is given, where mktime_tz treats
+            # a None timezone offset as local time.
+            retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+        retry_date = email.utils.mktime_tz(retry_date_tuple)
+        seconds = retry_date - time.time()
+
+    if seconds < 0:
+        seconds = 0
+
+    return seconds
+
+
+def _retry_timeout(response: httpx.Response, retries: int) -> float:
+    """
+    Determine the amount of time to wait before retrying a request.
+    This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff
+    with a jitter to determine the number of seconds to wait.
+    """
+
+    # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
+    retry_after = _parse_retry_after(response.headers)
+    if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER:
+        return retry_after
+
+    # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
+    retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
+
+    # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
+    timeout = retry_delay * (1 - 0.25 * random())
+    return timeout if timeout >= 0 else 0
+
+
+def _should_retry(response: httpx.Response) -> bool:
+    retriable_400s = [429, 408, 409]
+    return response.status_code >= 500 or response.status_code in retriable_400s
+
+
+def remove_omit_from_dict(
+    original: typing.Dict[str, typing.Optional[typing.Any]], omit: typing.Optional[typing.Any]
+) -> typing.Dict[str, typing.Any]:
+    if omit is None:
+        return original
+    new: typing.Dict[str, typing.Any] = {}
+    for key, value in original.items():
+        if value is not omit:
+            new[key] = value
+    return new
+
+
+def maybe_filter_request_body(
+    data: typing.Optional[typing.Any],
+    request_options: typing.Optional[RequestOptions],
+    omit: typing.Optional[typing.Any],
+) -> typing.Optional[typing.Any]:
+    if data is None:
+        return (
+            jsonable_encoder(request_options.get("additional_body_parameters", {}))
+            if request_options is not None
+            else None
+        )
+    elif not isinstance(data, typing.Mapping):
+        data_content = jsonable_encoder(data)
+    else:
+        data_content = {
+            **(jsonable_encoder(remove_omit_from_dict(data, omit))),  # type: ignore
+            **(
+                jsonable_encoder(request_options.get("additional_body_parameters", {}))
+                if request_options is not None
+                else {}
+            ),
+        }
+    return data_content
+
+
+class HttpClient:
+    def __init__(
+        self,
+        *,
+        httpx_client: httpx.Client,
+        base_timeout: typing.Optional[float],
+        base_headers: typing.Dict[str, str],
+        base_url: typing.Optional[str] = None,
+    ):
+        self.base_url = base_url
+        self.base_timeout = base_timeout
+        self.base_headers = base_headers
+        self.httpx_client = httpx_client
+
+    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        if base_url is None:
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
+        return base_url
+
+    def request(
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> httpx.Response:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        response = self.httpx_client.request(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        )
+
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+        if _should_retry(response=response):
+            if max_retries > retries:
+                time.sleep(_retry_timeout(response=response, retries=retries))
+                return self.request(
+                    path=path,
+                    method=method,
+                    base_url=base_url,
+                    params=params,
+                    json=json,
+                    content=content,
+                    files=files,
+                    headers=headers,
+                    request_options=request_options,
+                    retries=retries + 1,
+                    omit=omit,
+                )
+
+        return response
+
+    @contextmanager
+    def stream(
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> typing.Iterator[httpx.Response]:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        with self.httpx_client.stream(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        ) as stream:
+            yield stream
+
+
+class AsyncHttpClient:
+    def __init__(
+        self,
+        *,
+        httpx_client: httpx.AsyncClient,
+        base_timeout: typing.Optional[float],
+        base_headers: typing.Dict[str, str],
+        base_url: typing.Optional[str] = None,
+    ):
+        self.base_url = base_url
+        self.base_timeout = base_timeout
+        self.base_headers = base_headers
+        self.httpx_client = httpx_client
+
+    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        if base_url is None:
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
+        return base_url
+
+    async def request(
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> httpx.Response:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        # Add the input to each of these and do None-safety checks
+        response = await self.httpx_client.request(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        )
+
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+        if _should_retry(response=response):
+            if max_retries > retries:
+                await asyncio.sleep(_retry_timeout(response=response, retries=retries))
+                return await self.request(
+                    path=path,
+                    method=method,
+                    base_url=base_url,
+                    params=params,
+                    json=json,
+                    content=content,
+                    files=files,
+                    headers=headers,
+                    request_options=request_options,
+                    retries=retries + 1,
+                    omit=omit,
+                )
+        return response
+
+    @asynccontextmanager
+    async def stream(
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> typing.AsyncIterator[httpx.Response]:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        async with self.httpx_client.stream(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit=omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        ) as stream:
+            yield stream
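
Illustrative note (not part of the diff): with no acceptable Retry-After header, the constants above produce roughly a 0.5s, 1s, 2s, 4s, 8s, 10s, 10s, ... schedule, reduced by up to 25% jitter. A minimal standalone sketch of that formula, mirroring _retry_timeout:

import random

INITIAL_RETRY_DELAY_SECONDS = 0.5
MAX_RETRY_DELAY_SECONDS = 10

def approximate_retry_delay(retries: int) -> float:
    # Exponential backoff capped at MAX_RETRY_DELAY_SECONDS...
    retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
    # ...minus up to 25% jitter, so concurrent clients do not retry in lockstep.
    return retry_delay * (1 - 0.25 * random.random())

for attempt in range(6):
    print(attempt, round(approximate_retry_delay(attempt), 2))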
label_studio_sdk/core/jsonable_encoder.py
@@ -0,0 +1,99 @@
+# This file was auto-generated by Fern from our API Definition.
+
+"""
+jsonable_encoder converts a Python object to a JSON-friendly dict
+(e.g. datetimes to strings, Pydantic models to dicts).
+
+Taken from FastAPI, and made a bit simpler
+https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py
+"""
+
+import dataclasses
+import datetime as dt
+from collections import defaultdict
+from enum import Enum
+from pathlib import PurePath
+from types import GeneratorType
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
+
+from .datetime_utils import serialize_datetime
+from .pydantic_utilities import pydantic_v1
+
+SetIntStr = Set[Union[int, str]]
+DictIntStrAny = Dict[Union[int, str], Any]
+
+
+def generate_encoders_by_class_tuples(
+    type_encoder_map: Dict[Any, Callable[[Any], Any]]
+) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
+    encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
+    for type_, encoder in type_encoder_map.items():
+        encoders_by_class_tuples[encoder] += (type_,)
+    return encoders_by_class_tuples
+
+
+encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE)
+
+
+def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
+    custom_encoder = custom_encoder or {}
+    if custom_encoder:
+        if type(obj) in custom_encoder:
+            return custom_encoder[type(obj)](obj)
+        else:
+            for encoder_type, encoder_instance in custom_encoder.items():
+                if isinstance(obj, encoder_type):
+                    return encoder_instance(obj)
+    if isinstance(obj, pydantic_v1.BaseModel):
+        encoder = getattr(obj.__config__, "json_encoders", {})
+        if custom_encoder:
+            encoder.update(custom_encoder)
+        obj_dict = obj.dict(by_alias=True)
+        if "__root__" in obj_dict:
+            obj_dict = obj_dict["__root__"]
+        return jsonable_encoder(obj_dict, custom_encoder=encoder)
+    if dataclasses.is_dataclass(obj):
+        obj_dict = dataclasses.asdict(obj)
+        return jsonable_encoder(obj_dict, custom_encoder=custom_encoder)
+    if isinstance(obj, Enum):
+        return obj.value
+    if isinstance(obj, PurePath):
+        return str(obj)
+    if isinstance(obj, (str, int, float, type(None))):
+        return obj
+    if isinstance(obj, dt.datetime):
+        return serialize_datetime(obj)
+    if isinstance(obj, dt.date):
+        return str(obj)
+    if isinstance(obj, dict):
+        encoded_dict = {}
+        allowed_keys = set(obj.keys())
+        for key, value in obj.items():
+            if key in allowed_keys:
+                encoded_key = jsonable_encoder(key, custom_encoder=custom_encoder)
+                encoded_value = jsonable_encoder(value, custom_encoder=custom_encoder)
+                encoded_dict[encoded_key] = encoded_value
+        return encoded_dict
+    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
+        encoded_list = []
+        for item in obj:
+            encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder))
+        return encoded_list
+
+    if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE:
+        return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj)
+    for encoder, classes_tuple in encoders_by_class_tuples.items():
+        if isinstance(obj, classes_tuple):
+            return encoder(obj)
+
+    try:
+        data = dict(obj)
+    except Exception as e:
+        errors: List[Exception] = []
+        errors.append(e)
+        try:
+            data = vars(obj)
+        except Exception as e:
+            errors.append(e)
+            raise ValueError(errors) from e
+    return jsonable_encoder(data, custom_encoder=custom_encoder)
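
Illustrative usage (not part of the diff), assuming label-studio-sdk 1.0.1 is installed; the dataclass and enum below are made up for the example:

import dataclasses
import datetime as dt
from enum import Enum

from label_studio_sdk.core.jsonable_encoder import jsonable_encoder

class Status(Enum):
    DONE = "done"

@dataclasses.dataclass
class Item:
    name: str
    created: dt.datetime
    status: Status

# Dataclasses, enums and datetimes become plain JSON-friendly values;
# the exact datetime string comes from core.datetime_utils.serialize_datetime (not shown in this section).
print(jsonable_encoder(Item("task", dt.datetime(2024, 5, 1), Status.DONE)))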
label_studio_sdk/core/pagination.py
@@ -0,0 +1,87 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from typing_extensions import Self
+
+from .pydantic_utilities import pydantic_v1
+
+# Generic to represent the underlying type of the results within a page
+T = typing.TypeVar("T")
+
+# SDKs implement a Page ABC per-pagination request, the endpoint then retuns a pager that wraps this type
+# for example, an endpoint will return SyncPager[UserPage] where UserPage implements the Page ABC. ex:
+#
+# SyncPager<InnerListType>(
+#     has_next=response.list_metadata.after is not None,
+#     items=response.data,
+#     # This should be the outer function that returns the SyncPager again
+#     get_next=lambda: list(..., cursor: response.cursor) (or list(..., offset: offset + 1))
+# )
+class BasePage(pydantic_v1.BaseModel, typing.Generic[T]):
+    has_next: bool
+    items: typing.Optional[typing.List[T]]
+
+
+class SyncPage(BasePage, typing.Generic[T]):
+    get_next: typing.Optional[typing.Callable[[], typing.Optional[Self]]]
+
+
+class AsyncPage(BasePage, typing.Generic[T]):
+    get_next: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Optional[Self]]]]
+
+
+# ----------------------------
+
+
+class SyncPager(SyncPage[T], typing.Generic[T]):
+    # Here we type ignore the iterator to avoid a mypy error
+    # caused by the type conflict with Pydanitc's __iter__ method
+    # brought in by extending the base model
+    def __iter__(self) -> typing.Iterator[T]:  # type: ignore
+        for page in self.iter_pages():
+            if page.items is not None:
+                for item in page.items:
+                    yield item
+
+    def iter_pages(self) -> typing.Iterator[SyncPage[T]]:
+        page: typing.Union[SyncPager[T], None] = self
+        while True:
+            if page is not None:
+                yield page
+                if page.has_next and page.get_next is not None:
+                    page = page.get_next()
+                    if page is None or page.items is None or len(page.items) == 0:
+                        return
+                else:
+                    return
+            else:
+                return
+
+    def next_page(self) -> typing.Optional[SyncPage[T]]:
+        return self.get_next() if self.get_next is not None else None
+
+
+class AsyncPager(AsyncPage[T], typing.Generic[T]):
+    async def __aiter__(self) -> typing.AsyncIterator[T]:  # type: ignore
+        async for page in self.iter_pages():
+            if page.items is not None:
+                for item in page.items:
+                    yield item
+
+    async def iter_pages(self) -> typing.AsyncIterator[AsyncPage[T]]:
+        page: typing.Union[AsyncPager[T], None] = self
+        while True:
+            if page is not None:
+                yield page
+                if page is not None and page.has_next and page.get_next is not None:
+                    page = await page.get_next()
+                    if page is None or page.items is None or len(page.items) == 0:
+                        return
+                else:
+                    return
+            else:
+                return
+
+    async def next_page(self) -> typing.Optional[AsyncPage[T]]:
+        return await self.get_next() if self.get_next is not None else None
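
Illustrative usage (not part of the diff): a hand-built two-page pager, showing how iteration follows get_next and flattens items; in the SDK the generated list endpoints construct these pagers for you.

from label_studio_sdk.core.pagination import SyncPager

# Hypothetical pages with integer items, purely to demonstrate the iteration order.
second = SyncPager(has_next=False, items=[3, 4], get_next=None)
first = SyncPager(has_next=True, items=[1, 2], get_next=lambda: second)

print(list(first))  # [1, 2, 3, 4]: iterating walks every page via get_next
print(first.next_page().items)  # [3, 4]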
label_studio_sdk/core/pydantic_utilities.py
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+
+IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+
+if IS_PYDANTIC_V2:
+    import pydantic.v1 as pydantic_v1  # type: ignore # nopycln: import
+else:
+    import pydantic as pydantic_v1  # type: ignore # nopycln: import
+
+
+def deep_union_pydantic_dicts(
+    source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
+) -> typing.Dict[str, typing.Any]:
+    for key, value in source.items():
+        if isinstance(value, dict):
+            node = destination.setdefault(key, {})
+            deep_union_pydantic_dicts(value, node)
+        else:
+            destination[key] = value
+
+    return destination
+
+
+__all__ = ["pydantic_v1"]
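
Illustrative usage (not part of the diff): deep_union_pydantic_dicts recursively merges nested dictionaries in place.

from label_studio_sdk.core.pydantic_utilities import deep_union_pydantic_dicts

destination = {"a": {"x": 1}, "b": 1}
source = {"a": {"y": 2}, "c": 3}
# Nested keys from source are folded into destination and the merged dict is returned.
print(deep_union_pydantic_dicts(source, destination))
# {'a': {'x': 1, 'y': 2}, 'b': 1, 'c': 3}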
label_studio_sdk/core/query_encoder.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from collections import ChainMap
+from typing import Any, Dict, Optional
+
+from .pydantic_utilities import pydantic_v1
+
+
+# Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict
+def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> Dict[str, Any]:
+    result = {}
+    for k, v in dict_flat.items():
+        key = f"{key_prefix}[{k}]" if key_prefix is not None else k
+        if isinstance(v, dict):
+            result.update(traverse_query_dict(v, key))
+        else:
+            result[key] = v
+    return result
+
+
+def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]:
+    if isinstance(query_value, pydantic_v1.BaseModel) or isinstance(query_value, dict):
+        if isinstance(query_value, pydantic_v1.BaseModel):
+            obj_dict = query_value.dict(by_alias=True)
+        else:
+            obj_dict = query_value
+        return traverse_query_dict(obj_dict, query_key)
+
+    return {query_key: query_value}
+
+
+def encode_query(query: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+    return dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) if query is not None else None
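
Illustrative usage (not part of the diff): nested dicts (and Pydantic models) passed as query parameters are flattened into bracketed keys before being handed to httpx.

from label_studio_sdk.core.query_encoder import encode_query

# Hypothetical query parameters, chosen only to show the flattening.
print(encode_query({"page": 1, "filters": {"status": {"eq": "completed"}}}))
# {'filters[status][eq]': 'completed', 'page': 1} (key order may vary)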