phenoml 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of phenoml may be problematic — see the advisory details on the package registry page.
- phenoml/__init__.py +30 -0
- phenoml/agent/__init__.py +58 -0
- phenoml/agent/client.py +820 -0
- phenoml/agent/errors/__init__.py +11 -0
- phenoml/agent/errors/bad_request_error.py +10 -0
- phenoml/agent/errors/forbidden_error.py +10 -0
- phenoml/agent/errors/internal_server_error.py +10 -0
- phenoml/agent/errors/not_found_error.py +10 -0
- phenoml/agent/errors/unauthorized_error.py +10 -0
- phenoml/agent/prompts/__init__.py +7 -0
- phenoml/agent/prompts/client.py +707 -0
- phenoml/agent/prompts/raw_client.py +1345 -0
- phenoml/agent/prompts/types/__init__.py +8 -0
- phenoml/agent/prompts/types/prompts_delete_response.py +20 -0
- phenoml/agent/prompts/types/prompts_list_response.py +22 -0
- phenoml/agent/raw_client.py +1501 -0
- phenoml/agent/types/__init__.py +45 -0
- phenoml/agent/types/agent_chat_response.py +33 -0
- phenoml/agent/types/agent_create_request_provider.py +13 -0
- phenoml/agent/types/agent_create_request_provider_item.py +7 -0
- phenoml/agent/types/agent_delete_response.py +20 -0
- phenoml/agent/types/agent_fhir_config.py +31 -0
- phenoml/agent/types/agent_list_response.py +22 -0
- phenoml/agent/types/agent_prompts_response.py +22 -0
- phenoml/agent/types/agent_response.py +22 -0
- phenoml/agent/types/agent_template.py +56 -0
- phenoml/agent/types/agent_template_provider.py +13 -0
- phenoml/agent/types/agent_template_provider_item.py +5 -0
- phenoml/agent/types/agent_update_request_provider.py +13 -0
- phenoml/agent/types/agent_update_request_provider_item.py +7 -0
- phenoml/agent/types/chat_fhir_client_config.py +31 -0
- phenoml/agent/types/json_patch.py +7 -0
- phenoml/agent/types/json_patch_operation.py +36 -0
- phenoml/agent/types/json_patch_operation_op.py +5 -0
- phenoml/agent/types/prompt_template.py +52 -0
- phenoml/agent/types/success_response.py +20 -0
- phenoml/authtoken/__init__.py +17 -0
- phenoml/authtoken/auth/__init__.py +7 -0
- phenoml/authtoken/auth/client.py +129 -0
- phenoml/authtoken/auth/raw_client.py +173 -0
- phenoml/authtoken/auth/types/__init__.py +7 -0
- phenoml/authtoken/auth/types/auth_generate_token_response.py +22 -0
- phenoml/authtoken/client.py +39 -0
- phenoml/authtoken/errors/__init__.py +8 -0
- phenoml/authtoken/errors/bad_request_error.py +10 -0
- phenoml/authtoken/errors/unauthorized_error.py +10 -0
- phenoml/authtoken/raw_client.py +13 -0
- phenoml/authtoken/types/__init__.py +8 -0
- phenoml/authtoken/types/bad_request_error_body.py +21 -0
- phenoml/authtoken/types/unauthorized_error_body.py +21 -0
- phenoml/client.py +168 -0
- phenoml/cohort/__init__.py +8 -0
- phenoml/cohort/client.py +113 -0
- phenoml/cohort/errors/__init__.py +9 -0
- phenoml/cohort/errors/bad_request_error.py +10 -0
- phenoml/cohort/errors/internal_server_error.py +10 -0
- phenoml/cohort/errors/unauthorized_error.py +10 -0
- phenoml/cohort/raw_client.py +185 -0
- phenoml/cohort/types/__init__.py +8 -0
- phenoml/cohort/types/cohort_response.py +33 -0
- phenoml/cohort/types/search_concept.py +37 -0
- phenoml/construe/__init__.py +45 -0
- phenoml/construe/client.py +399 -0
- phenoml/construe/errors/__init__.py +11 -0
- phenoml/construe/errors/bad_request_error.py +10 -0
- phenoml/construe/errors/conflict_error.py +10 -0
- phenoml/construe/errors/failed_dependency_error.py +10 -0
- phenoml/construe/errors/internal_server_error.py +10 -0
- phenoml/construe/errors/unauthorized_error.py +10 -0
- phenoml/construe/raw_client.py +706 -0
- phenoml/construe/types/__init__.py +41 -0
- phenoml/construe/types/bad_request_error_body.py +27 -0
- phenoml/construe/types/construe_cohort_request_config.py +37 -0
- phenoml/construe/types/construe_cohort_response.py +33 -0
- phenoml/construe/types/construe_cohort_response_queries_item.py +49 -0
- phenoml/construe/types/construe_cohort_response_queries_item_code_extract_results_item.py +31 -0
- phenoml/construe/types/construe_cohort_response_queries_item_code_extract_results_item_codes_item.py +32 -0
- phenoml/construe/types/construe_upload_code_system_response.py +19 -0
- phenoml/construe/types/extract_codes_result.py +22 -0
- phenoml/construe/types/extract_request_config.py +23 -0
- phenoml/construe/types/extract_request_config_chunking_method.py +5 -0
- phenoml/construe/types/extract_request_system.py +37 -0
- phenoml/construe/types/extracted_code_result.py +41 -0
- phenoml/construe/types/internal_server_error_body.py +27 -0
- phenoml/construe/types/unauthorized_error_body.py +27 -0
- phenoml/construe/types/upload_request_format.py +5 -0
- phenoml/core/__init__.py +52 -0
- phenoml/core/api_error.py +23 -0
- phenoml/core/client_wrapper.py +85 -0
- phenoml/core/datetime_utils.py +28 -0
- phenoml/core/file.py +67 -0
- phenoml/core/force_multipart.py +16 -0
- phenoml/core/http_client.py +543 -0
- phenoml/core/http_response.py +55 -0
- phenoml/core/jsonable_encoder.py +100 -0
- phenoml/core/pydantic_utilities.py +255 -0
- phenoml/core/query_encoder.py +58 -0
- phenoml/core/remove_none_from_dict.py +11 -0
- phenoml/core/request_options.py +35 -0
- phenoml/core/serialization.py +276 -0
- phenoml/environment.py +7 -0
- phenoml/lang2fhir/__init__.py +27 -0
- phenoml/lang2fhir/client.py +430 -0
- phenoml/lang2fhir/errors/__init__.py +11 -0
- phenoml/lang2fhir/errors/bad_request_error.py +10 -0
- phenoml/lang2fhir/errors/failed_dependency_error.py +10 -0
- phenoml/lang2fhir/errors/forbidden_error.py +10 -0
- phenoml/lang2fhir/errors/internal_server_error.py +10 -0
- phenoml/lang2fhir/errors/unauthorized_error.py +10 -0
- phenoml/lang2fhir/raw_client.py +788 -0
- phenoml/lang2fhir/types/__init__.py +19 -0
- phenoml/lang2fhir/types/create_request_resource.py +25 -0
- phenoml/lang2fhir/types/document_request_file_type.py +7 -0
- phenoml/lang2fhir/types/document_request_resource.py +5 -0
- phenoml/lang2fhir/types/fhir_resource.py +5 -0
- phenoml/lang2fhir/types/lang2fhir_upload_profile_response.py +23 -0
- phenoml/lang2fhir/types/search_response.py +33 -0
- phenoml/py.typed +0 -0
- phenoml/tools/__init__.py +33 -0
- phenoml/tools/client.py +392 -0
- phenoml/tools/errors/__init__.py +11 -0
- phenoml/tools/errors/bad_request_error.py +10 -0
- phenoml/tools/errors/failed_dependency_error.py +10 -0
- phenoml/tools/errors/forbidden_error.py +10 -0
- phenoml/tools/errors/internal_server_error.py +10 -0
- phenoml/tools/errors/unauthorized_error.py +10 -0
- phenoml/tools/raw_client.py +745 -0
- phenoml/tools/types/__init__.py +25 -0
- phenoml/tools/types/cohort_request_provider.py +5 -0
- phenoml/tools/types/cohort_response.py +49 -0
- phenoml/tools/types/fhir_client_config.py +31 -0
- phenoml/tools/types/lang2fhir_and_create_request_provider.py +7 -0
- phenoml/tools/types/lang2fhir_and_create_request_resource.py +25 -0
- phenoml/tools/types/lang2fhir_and_create_response.py +33 -0
- phenoml/tools/types/lang2fhir_and_search_request_provider.py +7 -0
- phenoml/tools/types/lang2fhir_and_search_response.py +40 -0
- phenoml/tools/types/search_concept.py +41 -0
- phenoml/version.py +3 -0
- phenoml/wrapper_client.py +123 -0
- phenoml-0.0.1.dist-info/LICENSE +21 -0
- phenoml-0.0.1.dist-info/METADATA +192 -0
- phenoml-0.0.1.dist-info/RECORD +143 -0
- phenoml-0.0.1.dist-info/WHEEL +4 -0
phenoml/core/file.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast
|
|
4
|
+
|
|
5
|
+
# File typing inspired by the flexibility of types within the httpx library
# https://github.com/encode/httpx/blob/master/httpx/_types.py
FileContent = Union[IO[bytes], bytes, str]
File = Union[
    # file (or bytes)
    FileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], FileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], FileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[
        Optional[str],
        FileContent,
        Optional[str],
        Mapping[str, str],
    ],
]


def convert_file_dict_to_httpx_tuples(
    d: Dict[str, Union[File, List[File]]],
) -> List[Tuple[str, File]]:
    """
    Flatten a mapping of field name -> file (or list of files) into the
    list-of-tuples shape HTTPX accepts. HTTPX normally takes a dict, but
    sending several files under one field name requires the tuple-list
    form, which also works for single files.
    https://github.com/encode/httpx/pull/1032
    """
    pairs: List[Tuple[str, File]] = []
    for field_name, value in d.items():
        # A list value means multiple files share the same field name.
        entries = value if isinstance(value, list) else [value]
        pairs.extend((field_name, entry) for entry in entries)
    return pairs
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def with_content_type(*, file: File, default_content_type: str) -> File:
    """
    Normalize *file* into a tuple form that carries a content type,
    preferring any content type already declared on the file tuple and
    falling back to *default_content_type* otherwise.
    """
    if not isinstance(file, tuple):
        # Bare file content: wrap with no filename and the default type.
        return (None, file, default_content_type)

    arity = len(file)
    if arity == 2:
        name, payload = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
        return (name, payload, default_content_type)
    if arity == 3:
        name, payload, declared_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
        return (name, payload, declared_type or default_content_type)
    if arity == 4:
        name, payload, declared_type, extra_headers = cast(  # type: ignore
            Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
        )
        return (name, payload, declared_type or default_content_type, extra_headers)
    raise ValueError(f"Unexpected tuple length: {len(file)}")
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class ForceMultipartDict(dict):
    """
    A ``dict`` subclass whose truth value is always True.

    An empty dict is normally falsy, which makes HTTP libraries skip
    multipart/form-data encoding; passing an (empty) instance of this
    class as ``files`` forces multipart encoding anyway.
    """

    def __bool__(self) -> bool:
        # Truthy even when empty — that is the entire point of this class.
        return True


# Shared sentinel passed as ``files`` to force multipart encoding.
FORCE_MULTIPART = ForceMultipartDict()
|
|
@@ -0,0 +1,543 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import email.utils
|
|
5
|
+
import re
|
|
6
|
+
import time
|
|
7
|
+
import typing
|
|
8
|
+
import urllib.parse
|
|
9
|
+
from contextlib import asynccontextmanager, contextmanager
|
|
10
|
+
from random import random
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
from .file import File, convert_file_dict_to_httpx_tuples
|
|
14
|
+
from .force_multipart import FORCE_MULTIPART
|
|
15
|
+
from .jsonable_encoder import jsonable_encoder
|
|
16
|
+
from .query_encoder import encode_query
|
|
17
|
+
from .remove_none_from_dict import remove_none_from_dict
|
|
18
|
+
from .request_options import RequestOptions
|
|
19
|
+
from httpx._types import RequestFiles
|
|
20
|
+
|
|
21
|
+
# Base delay (seconds) before the first retry; doubled on each subsequent attempt.
INITIAL_RETRY_DELAY_SECONDS = 0.5
# Cap (seconds) on the locally computed exponential-backoff delay.
MAX_RETRY_DELAY_SECONDS = 10
# Largest server-supplied Retry-After value (seconds) that will be honored.
MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
|
|
27
|
+
"""
|
|
28
|
+
This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait.
|
|
29
|
+
|
|
30
|
+
Inspired by the urllib3 retry implementation.
|
|
31
|
+
"""
|
|
32
|
+
retry_after_ms = response_headers.get("retry-after-ms")
|
|
33
|
+
if retry_after_ms is not None:
|
|
34
|
+
try:
|
|
35
|
+
return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0
|
|
36
|
+
except Exception:
|
|
37
|
+
pass
|
|
38
|
+
|
|
39
|
+
retry_after = response_headers.get("retry-after")
|
|
40
|
+
if retry_after is None:
|
|
41
|
+
return None
|
|
42
|
+
|
|
43
|
+
# Attempt to parse the header as an int.
|
|
44
|
+
if re.match(r"^\s*[0-9]+\s*$", retry_after):
|
|
45
|
+
seconds = float(retry_after)
|
|
46
|
+
# Fallback to parsing it as a date.
|
|
47
|
+
else:
|
|
48
|
+
retry_date_tuple = email.utils.parsedate_tz(retry_after)
|
|
49
|
+
if retry_date_tuple is None:
|
|
50
|
+
return None
|
|
51
|
+
if retry_date_tuple[9] is None: # Python 2
|
|
52
|
+
# Assume UTC if no timezone was specified
|
|
53
|
+
# On Python2.7, parsedate_tz returns None for a timezone offset
|
|
54
|
+
# instead of 0 if no timezone is given, where mktime_tz treats
|
|
55
|
+
# a None timezone offset as local time.
|
|
56
|
+
retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
|
|
57
|
+
|
|
58
|
+
retry_date = email.utils.mktime_tz(retry_date_tuple)
|
|
59
|
+
seconds = retry_date - time.time()
|
|
60
|
+
|
|
61
|
+
if seconds < 0:
|
|
62
|
+
seconds = 0
|
|
63
|
+
|
|
64
|
+
return seconds
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _retry_timeout(response: httpx.Response, retries: int) -> float:
    """
    Compute how many seconds to sleep before retrying *response*.

    A reasonable server-provided Retry-After delay is honored first;
    otherwise jittered exponential backoff based on the attempt count
    (*retries*) is used.
    """
    # If the server asked for a specific (reasonable) delay, obey it.
    server_delay = _parse_retry_after(response.headers)
    if server_delay is not None and server_delay <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER:
        return server_delay

    # Exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
    backoff = min(INITIAL_RETRY_DELAY_SECONDS * (2.0 ** retries), MAX_RETRY_DELAY_SECONDS)

    # Subtract up to 25% random jitter so concurrent clients spread out.
    jittered = backoff * (1 - 0.25 * random())
    return jittered if jittered >= 0 else 0
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _should_retry(response: httpx.Response) -> bool:
    """Return True for retryable statuses: any 5xx, plus 408/409/429."""
    status = response.status_code
    return status >= 500 or status in (429, 408, 409)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def remove_omit_from_dict(
    original: typing.Dict[str, typing.Optional[typing.Any]],
    omit: typing.Optional[typing.Any],
) -> typing.Dict[str, typing.Any]:
    """
    Return *original* with every entry whose value is the *omit* sentinel
    removed. Identity (``is``) comparison is used deliberately: the
    sentinel marks "not provided" and must not match values that merely
    compare equal. When *omit* is None the dict is returned unchanged.
    """
    if omit is None:
        return original
    return {key: value for key, value in original.items() if value is not omit}
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def maybe_filter_request_body(
    data: typing.Optional[typing.Any],
    request_options: typing.Optional[RequestOptions],
    omit: typing.Optional[typing.Any],
) -> typing.Optional[typing.Any]:
    """
    Prepare a request body: drop omit-sentinel entries from mapping bodies
    and merge in any ``additional_body_parameters`` from *request_options*.

    Returns None when there is neither data nor request options; returns
    non-mapping bodies encoded as-is (extras cannot be merged into those).
    """

    def _extras() -> typing.Optional[typing.Any]:
        # Encoded additional body parameters, or None when no options given.
        if request_options is None:
            return None
        return jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}

    if data is None:
        # No caller-supplied body: the extras (possibly None) stand alone.
        return _extras()
    if not isinstance(data, typing.Mapping):
        return jsonable_encoder(data)
    merged = dict(jsonable_encoder(remove_omit_from_dict(data, omit)))  # type: ignore
    merged.update(_extras() or {})
    return merged
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
# Abstracted out for testing purposes
def get_request_body(
    *,
    json: typing.Optional[typing.Any],
    data: typing.Optional[typing.Any],
    request_options: typing.Optional[RequestOptions],
    omit: typing.Optional[typing.Any],
) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]:
    """
    Build the ``(json_body, data_body)`` pair for a request. At most one of
    the two is populated: ``data`` wins when provided; otherwise the JSON
    body is used (even when ``json`` is None, so that extra body parameters
    from *request_options* still apply). Empty dict bodies collapse to None
    so that nothing is sent at all.
    """
    json_body: typing.Optional[typing.Any] = None
    data_body: typing.Optional[typing.Any] = None
    if data is None:
        # If both data and json are None, we still send json data in the
        # event extra properties are specified via request_options.
        json_body = maybe_filter_request_body(json, request_options, omit)
    else:
        data_body = maybe_filter_request_body(data, request_options, omit)

    # An empty JSON/data body should simply not be sent.
    if json_body == {}:
        json_body = None
    if data_body == {}:
        data_body = None
    return json_body, data_body
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
class HttpClient:
    """Synchronous HTTP client used by the generated SDK.

    Wraps an ``httpx.Client`` and layers on:
      * base-URL resolution (per-request override beats the client-level callable),
      * default headers and timeouts supplied as callables,
      * header/query/body encoding with omit-sentinel and None filtering,
      * automatic retries honoring Retry-After / exponential backoff.
    """

    def __init__(
        self,
        *,
        httpx_client: httpx.Client,
        base_timeout: typing.Callable[[], typing.Optional[float]],
        base_headers: typing.Callable[[], typing.Dict[str, str]],
        base_url: typing.Optional[typing.Callable[[], str]] = None,
    ):
        self.base_url = base_url
        self.base_timeout = base_timeout
        self.base_headers = base_headers
        self.httpx_client = httpx_client

    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
        """Return the per-request base URL if given, else the client default.

        Raises:
            ValueError: if neither a per-request nor a client-level base URL exists.
        """
        base_url = maybe_base_url
        if self.base_url is not None and base_url is None:
            base_url = self.base_url()

        if base_url is None:
            raise ValueError("A base_url is required to make this request, please provide one and try again.")
        return base_url

    def request(
        self,
        path: typing.Optional[str] = None,
        *,
        method: str,
        base_url: typing.Optional[str] = None,
        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        json: typing.Optional[typing.Any] = None,
        data: typing.Optional[typing.Any] = None,
        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
        files: typing.Optional[
            typing.Union[
                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
                typing.List[typing.Tuple[str, File]],
            ]
        ] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
        force_multipart: typing.Optional[bool] = None,
    ) -> httpx.Response:
        """Issue a request and transparently retry retryable failures.

        ``retries`` counts attempts already made; another retry happens only
        while ``request_options["max_retries"]`` exceeds it.
        """
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self.base_timeout()
        )

        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)

        request_files: typing.Optional[RequestFiles] = (
            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
            if (files is not None and files is not omit and isinstance(files, dict))
            else None
        )

        # An empty files mapping would normally disable multipart encoding;
        # substitute the always-truthy sentinel when the caller wants it forced.
        if (request_files is None or len(request_files) == 0) and force_multipart:
            request_files = FORCE_MULTIPART

        response = self.httpx_client.request(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
            params=encode_query(
                jsonable_encoder(
                    remove_none_from_dict(
                        remove_omit_from_dict(
                            {
                                **(params if params is not None else {}),
                                **(
                                    request_options.get("additional_query_parameters", {}) or {}
                                    if request_options is not None
                                    else {}
                                ),
                            },
                            omit,
                        )
                    )
                )
            ),
            json=json_body,
            data=data_body,
            content=content,
            files=request_files,
            timeout=timeout,
        )

        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
        if _should_retry(response=response):
            if max_retries > retries:
                time.sleep(_retry_timeout(response=response, retries=retries))
                # FIX: forward `data` and `force_multipart` on retry — they were
                # previously dropped, so retried form/multipart requests lost
                # their body and encoding.
                return self.request(
                    path=path,
                    method=method,
                    base_url=base_url,
                    params=params,
                    json=json,
                    data=data,
                    content=content,
                    files=files,
                    headers=headers,
                    request_options=request_options,
                    retries=retries + 1,
                    omit=omit,
                    force_multipart=force_multipart,
                )

        return response

    @contextmanager
    def stream(
        self,
        path: typing.Optional[str] = None,
        *,
        method: str,
        base_url: typing.Optional[str] = None,
        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        json: typing.Optional[typing.Any] = None,
        data: typing.Optional[typing.Any] = None,
        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
        files: typing.Optional[
            typing.Union[
                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
                typing.List[typing.Tuple[str, File]],
            ]
        ] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
        force_multipart: typing.Optional[bool] = None,
    ) -> typing.Iterator[httpx.Response]:
        """Issue a streaming request, yielding the open ``httpx.Response``.

        Unlike :meth:`request`, a stream cannot be transparently retried
        once the body has started flowing, so no retry is attempted.
        """
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self.base_timeout()
        )

        request_files: typing.Optional[RequestFiles] = (
            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
            if (files is not None and files is not omit and isinstance(files, dict))
            else None
        )

        if (request_files is None or len(request_files) == 0) and force_multipart:
            request_files = FORCE_MULTIPART

        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)

        with self.httpx_client.stream(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        # `or {}` added for consistency with request(): guard
                        # against an explicit None in the request options.
                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
            params=encode_query(
                jsonable_encoder(
                    remove_none_from_dict(
                        remove_omit_from_dict(
                            {
                                **(params if params is not None else {}),
                                **(
                                    request_options.get("additional_query_parameters", {}) or {}
                                    if request_options is not None
                                    else {}
                                ),
                            },
                            omit,
                        )
                    )
                )
            ),
            json=json_body,
            data=data_body,
            content=content,
            files=request_files,
            timeout=timeout,
        ) as stream:
            yield stream
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
class AsyncHttpClient:
    """Asynchronous HTTP client used by the generated SDK.

    Wraps an ``httpx.AsyncClient`` and layers on:
      * base-URL resolution (per-request override beats the client-level callable),
      * default headers and timeouts supplied as callables,
      * header/query/body encoding with omit-sentinel and None filtering,
      * automatic retries honoring Retry-After / exponential backoff.
    """

    def __init__(
        self,
        *,
        httpx_client: httpx.AsyncClient,
        base_timeout: typing.Callable[[], typing.Optional[float]],
        base_headers: typing.Callable[[], typing.Dict[str, str]],
        base_url: typing.Optional[typing.Callable[[], str]] = None,
    ):
        self.base_url = base_url
        self.base_timeout = base_timeout
        self.base_headers = base_headers
        self.httpx_client = httpx_client

    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
        """Return the per-request base URL if given, else the client default.

        Raises:
            ValueError: if neither a per-request nor a client-level base URL exists.
        """
        base_url = maybe_base_url
        if self.base_url is not None and base_url is None:
            base_url = self.base_url()

        if base_url is None:
            raise ValueError("A base_url is required to make this request, please provide one and try again.")
        return base_url

    async def request(
        self,
        path: typing.Optional[str] = None,
        *,
        method: str,
        base_url: typing.Optional[str] = None,
        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        json: typing.Optional[typing.Any] = None,
        data: typing.Optional[typing.Any] = None,
        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
        files: typing.Optional[
            typing.Union[
                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
                typing.List[typing.Tuple[str, File]],
            ]
        ] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
        force_multipart: typing.Optional[bool] = None,
    ) -> httpx.Response:
        """Issue a request and transparently retry retryable failures.

        ``retries`` counts attempts already made; another retry happens only
        while ``request_options["max_retries"]`` exceeds it.
        """
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self.base_timeout()
        )

        request_files: typing.Optional[RequestFiles] = (
            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
            if (files is not None and files is not omit and isinstance(files, dict))
            else None
        )

        # An empty files mapping would normally disable multipart encoding;
        # substitute the always-truthy sentinel when the caller wants it forced.
        if (request_files is None or len(request_files) == 0) and force_multipart:
            request_files = FORCE_MULTIPART

        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)

        response = await self.httpx_client.request(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
            params=encode_query(
                jsonable_encoder(
                    remove_none_from_dict(
                        remove_omit_from_dict(
                            {
                                **(params if params is not None else {}),
                                **(
                                    request_options.get("additional_query_parameters", {}) or {}
                                    if request_options is not None
                                    else {}
                                ),
                            },
                            omit,
                        )
                    )
                )
            ),
            json=json_body,
            data=data_body,
            content=content,
            files=request_files,
            timeout=timeout,
        )

        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
        if _should_retry(response=response):
            if max_retries > retries:
                await asyncio.sleep(_retry_timeout(response=response, retries=retries))
                # FIX: forward `data` and `force_multipart` on retry — they were
                # previously dropped, so retried form/multipart requests lost
                # their body and encoding.
                return await self.request(
                    path=path,
                    method=method,
                    base_url=base_url,
                    params=params,
                    json=json,
                    data=data,
                    content=content,
                    files=files,
                    headers=headers,
                    request_options=request_options,
                    retries=retries + 1,
                    omit=omit,
                    force_multipart=force_multipart,
                )
        return response

    @asynccontextmanager
    async def stream(
        self,
        path: typing.Optional[str] = None,
        *,
        method: str,
        base_url: typing.Optional[str] = None,
        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        json: typing.Optional[typing.Any] = None,
        data: typing.Optional[typing.Any] = None,
        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
        files: typing.Optional[
            typing.Union[
                typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
                typing.List[typing.Tuple[str, File]],
            ]
        ] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
        force_multipart: typing.Optional[bool] = None,
    ) -> typing.AsyncIterator[httpx.Response]:
        """Issue a streaming request, yielding the open ``httpx.Response``.

        Unlike :meth:`request`, a stream cannot be transparently retried
        once the body has started flowing, so no retry is attempted.
        """
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
            else self.base_timeout()
        )

        request_files: typing.Optional[RequestFiles] = (
            convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
            if (files is not None and files is not omit and isinstance(files, dict))
            else None
        )

        if (request_files is None or len(request_files) == 0) and force_multipart:
            request_files = FORCE_MULTIPART

        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)

        async with self.httpx_client.stream(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        # `or {}` added for consistency with request(): guard
                        # against an explicit None in the request options.
                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
            params=encode_query(
                jsonable_encoder(
                    remove_none_from_dict(
                        remove_omit_from_dict(
                            {
                                **(params if params is not None else {}),
                                **(
                                    request_options.get("additional_query_parameters", {}) or {}
                                    if request_options is not None
                                    else {}
                                ),
                            },
                            omit,
                        )
                    )
                )
            ),
            json=json_body,
            data=data_body,
            content=content,
            files=request_files,
            timeout=timeout,
        ) as stream:
            yield stream
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
from typing import Dict, Generic, TypeVar
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
# Type parameter for the payload carried by HttpResponse / AsyncHttpResponse.
T = TypeVar("T")
"""Generic to represent the underlying type of the data wrapped by the HTTP response."""
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class BaseHttpResponse:
    """Minimal wrapper over an ``httpx.Response`` exposing only its headers."""

    # Underlying httpx response; subclasses use it for close()/aclose() too.
    _response: httpx.Response

    def __init__(self, response: httpx.Response):
        self._response = response

    @property
    def headers(self) -> Dict[str, str]:
        """A plain-dict snapshot of the response headers."""
        return dict(self._response.headers)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class HttpResponse(Generic[T], BaseHttpResponse):
    """Synchronous HTTP response exposing headers plus a typed payload."""

    # Deserialized payload of type T.
    _data: T

    def __init__(self, response: httpx.Response, data: T):
        super().__init__(response)
        self._data = data

    @property
    def data(self) -> T:
        """The deserialized body of the response."""
        return self._data

    def close(self) -> None:
        """Release the underlying network resources."""
        self._response.close()
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class AsyncHttpResponse(Generic[T], BaseHttpResponse):
    """Asynchronous HTTP response exposing headers plus a typed payload."""

    # Deserialized payload of type T.
    _data: T

    def __init__(self, response: httpx.Response, data: T):
        super().__init__(response)
        self._data = data

    @property
    def data(self) -> T:
        """The deserialized body of the response."""
        return self._data

    async def close(self) -> None:
        """Release the underlying network resources."""
        await self._response.aclose()
|