athena-intelligence 0.1.125__py3-none-any.whl → 0.1.127__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. athena/__init__.py +7 -0
  2. athena/agents/client.py +88 -36
  3. athena/agents/drive/client.py +80 -32
  4. athena/agents/general/client.py +222 -91
  5. athena/agents/research/client.py +80 -32
  6. athena/agents/sql/client.py +80 -32
  7. athena/base_client.py +13 -11
  8. athena/client.py +161 -61
  9. athena/core/__init__.py +21 -4
  10. athena/core/client_wrapper.py +9 -10
  11. athena/core/file.py +37 -8
  12. athena/core/http_client.py +97 -41
  13. athena/core/jsonable_encoder.py +33 -31
  14. athena/core/pydantic_utilities.py +272 -4
  15. athena/core/query_encoder.py +38 -13
  16. athena/core/request_options.py +5 -2
  17. athena/core/serialization.py +272 -0
  18. athena/errors/internal_server_error.py +2 -3
  19. athena/errors/unauthorized_error.py +2 -3
  20. athena/errors/unprocessable_entity_error.py +2 -3
  21. athena/query/client.py +208 -58
  22. athena/tools/calendar/client.py +82 -30
  23. athena/tools/client.py +956 -188
  24. athena/tools/email/client.py +117 -43
  25. athena/tools/structured_data_extractor/client.py +118 -67
  26. athena/tools/tasks/client.py +41 -17
  27. athena/types/__init__.py +4 -0
  28. athena/types/asset_content_request_out.py +26 -0
  29. athena/types/asset_node.py +14 -24
  30. athena/types/asset_not_found_error.py +11 -21
  31. athena/types/asset_screenshot_response_out.py +43 -0
  32. athena/types/chunk.py +11 -21
  33. athena/types/chunk_content_item.py +21 -41
  34. athena/types/chunk_result.py +13 -23
  35. athena/types/custom_agent_response.py +12 -22
  36. athena/types/data_frame_request_out.py +11 -21
  37. athena/types/data_frame_unknown_format_error.py +11 -21
  38. athena/types/document_chunk.py +12 -22
  39. athena/types/drive_agent_response.py +12 -22
  40. athena/types/file_chunk_request_out.py +11 -21
  41. athena/types/file_too_large_error.py +11 -21
  42. athena/types/folder_response.py +11 -21
  43. athena/types/general_agent_config.py +12 -21
  44. athena/types/general_agent_config_enabled_tools_item.py +0 -1
  45. athena/types/general_agent_request.py +13 -23
  46. athena/types/general_agent_response.py +12 -22
  47. athena/types/image_url_content.py +11 -21
  48. athena/types/parent_folder_error.py +11 -21
  49. athena/types/prompt_message.py +12 -22
  50. athena/types/research_agent_response.py +12 -22
  51. athena/types/save_asset_request_out.py +11 -21
  52. athena/types/sql_agent_response.py +13 -23
  53. athena/types/structured_data_extractor_response.py +15 -25
  54. athena/types/text_content.py +11 -21
  55. athena/types/tool.py +1 -13
  56. athena/types/type.py +1 -21
  57. athena/version.py +0 -1
  58. {athena_intelligence-0.1.125.dist-info → athena_intelligence-0.1.127.dist-info}/METADATA +12 -4
  59. athena_intelligence-0.1.127.dist-info/RECORD +89 -0
  60. {athena_intelligence-0.1.125.dist-info → athena_intelligence-0.1.127.dist-info}/WHEEL +1 -1
  61. athena_intelligence-0.1.125.dist-info/RECORD +0 -86
athena/core/__init__.py CHANGED
@@ -3,28 +3,45 @@
 from .api_error import ApiError
 from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
 from .datetime_utils import serialize_datetime
-from .file import File, convert_file_dict_to_httpx_tuples
+from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
 from .http_client import AsyncHttpClient, HttpClient
 from .jsonable_encoder import jsonable_encoder
-from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+from .pydantic_utilities import (
+    IS_PYDANTIC_V2,
+    UniversalBaseModel,
+    UniversalRootModel,
+    parse_obj_as,
+    universal_field_validator,
+    universal_root_validator,
+    update_forward_refs,
+)
 from .query_encoder import encode_query
 from .remove_none_from_dict import remove_none_from_dict
 from .request_options import RequestOptions
+from .serialization import FieldMetadata, convert_and_respect_annotation_metadata
 
 __all__ = [
     "ApiError",
     "AsyncClientWrapper",
     "AsyncHttpClient",
     "BaseClientWrapper",
+    "FieldMetadata",
     "File",
     "HttpClient",
+    "IS_PYDANTIC_V2",
     "RequestOptions",
     "SyncClientWrapper",
+    "UniversalBaseModel",
+    "UniversalRootModel",
+    "convert_and_respect_annotation_metadata",
     "convert_file_dict_to_httpx_tuples",
-    "deep_union_pydantic_dicts",
     "encode_query",
     "jsonable_encoder",
-    "pydantic_v1",
+    "parse_obj_as",
     "remove_none_from_dict",
     "serialize_datetime",
+    "universal_field_validator",
+    "universal_root_validator",
+    "update_forward_refs",
+    "with_content_type",
 ]
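Note on the new re-exports: the old pydantic_v1 shim and deep_union_pydantic_dicts are gone, replaced by a compatibility layer (UniversalBaseModel, UniversalRootModel, parse_obj_as, IS_PYDANTIC_V2, and related validators) that works under both Pydantic 1.x and 2.x. The pydantic_utilities module itself is not part of this diff, so the following is only a rough sketch under the assumption that IS_PYDANTIC_V2 is a simple version flag:

# Hypothetical sketch; the real pydantic_utilities implementation is not shown in this diff.
import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

def model_to_dict(model: pydantic.BaseModel) -> dict:
    # Pydantic v2 renamed .dict() to .model_dump(); a flag like IS_PYDANTIC_V2
    # lets shared SDK code branch on the installed major version.
    return model.model_dump() if IS_PYDANTIC_V2 else model.dict()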
athena/core/client_wrapper.py CHANGED
@@ -1,10 +1,9 @@
 # This file was auto-generated by Fern from our API Definition.
 
 import typing
-
 import httpx
-
-from .http_client import AsyncHttpClient, HttpClient
+from .http_client import HttpClient
+from .http_client import AsyncHttpClient
 
 
 class BaseClientWrapper:
@@ -17,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "athena-intelligence",
-            "X-Fern-SDK-Version": "0.1.125",
+            "X-Fern-SDK-Version": "0.1.127",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
@@ -36,9 +35,9 @@ class SyncClientWrapper(BaseClientWrapper):
         super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
         self.httpx_client = HttpClient(
             httpx_client=httpx_client,
-            base_headers=self.get_headers(),
-            base_timeout=self.get_timeout(),
-            base_url=self.get_base_url(),
+            base_headers=self.get_headers,
+            base_timeout=self.get_timeout,
+            base_url=self.get_base_url,
         )
 
 
@@ -49,7 +48,7 @@ class AsyncClientWrapper(BaseClientWrapper):
         super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
         self.httpx_client = AsyncHttpClient(
             httpx_client=httpx_client,
-            base_headers=self.get_headers(),
-            base_timeout=self.get_timeout(),
-            base_url=self.get_base_url(),
+            base_headers=self.get_headers,
+            base_timeout=self.get_timeout,
+            base_url=self.get_base_url,
         )
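The wrapper now hands the HTTP clients bound methods (self.get_headers, self.get_timeout, self.get_base_url) instead of their evaluated results. Combined with the typing.Callable parameters introduced in http_client.py below, this means headers, timeout, and base URL are re-read on every request rather than frozen when the client is constructed. A minimal illustration of the difference, outside the SDK:

# Toy example, not SDK code: a callable is re-evaluated per request, a dict is a one-time snapshot.
import typing

api_key = "key-1"

def get_headers() -> typing.Dict[str, str]:
    return {"X-API-KEY": api_key}

lazy_headers = get_headers          # stored as a callable, like the new wrapper
eager_headers = get_headers()       # evaluated once, like the old wrapper

api_key = "key-2"
assert lazy_headers() == {"X-API-KEY": "key-2"}   # sees the updated key
assert eager_headers == {"X-API-KEY": "key-1"}    # still the old snapshot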
athena/core/file.py CHANGED
@@ -1,25 +1,30 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import typing
+from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast
 
 # File typing inspired by the flexibility of types within the httpx library
 # https://github.com/encode/httpx/blob/master/httpx/_types.py
-FileContent = typing.Union[typing.IO[bytes], bytes, str]
-File = typing.Union[
+FileContent = Union[IO[bytes], bytes, str]
+File = Union[
     # file (or bytes)
     FileContent,
     # (filename, file (or bytes))
-    typing.Tuple[typing.Optional[str], FileContent],
+    Tuple[Optional[str], FileContent],
     # (filename, file (or bytes), content_type)
-    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]],
+    Tuple[Optional[str], FileContent, Optional[str]],
     # (filename, file (or bytes), content_type, headers)
-    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str], typing.Mapping[str, str]],
+    Tuple[
+        Optional[str],
+        FileContent,
+        Optional[str],
+        Mapping[str, str],
+    ],
 ]
 
 
 def convert_file_dict_to_httpx_tuples(
-    d: typing.Dict[str, typing.Union[File, typing.List[File]]]
-) -> typing.List[typing.Tuple[str, File]]:
+    d: Dict[str, Union[File, List[File]]],
+) -> List[Tuple[str, File]]:
     """
     The format we use is a list of tuples, where the first element is the
     name of the file and the second is the file object. Typically HTTPX wants
@@ -36,3 +41,27 @@ def convert_file_dict_to_httpx_tuples(
         else:
             httpx_tuples.append((key, file_like))
     return httpx_tuples
+
+
+def with_content_type(*, file: File, default_content_type: str) -> File:
+    """
+    This function resolves to the file's content type, if provided, and defaults
+    to the default_content_type value if not.
+    """
+    if isinstance(file, tuple):
+        if len(file) == 2:
+            filename, content = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
+            return (filename, content, default_content_type)
+        elif len(file) == 3:
+            filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
+            out_content_type = file_content_type or default_content_type
+            return (filename, content, out_content_type)
+        elif len(file) == 4:
+            filename, content, file_content_type, headers = cast(  # type: ignore
+                Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
+            )
+            out_content_type = file_content_type or default_content_type
+            return (filename, content, out_content_type, headers)
+        else:
+            raise ValueError(f"Unexpected tuple length: {len(file)}")
+    return (None, file, default_content_type)
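Since the body of with_content_type appears in full above, its behavior can be shown directly. The file name and bytes below are made up for illustration; the import path follows the file's location in this package:

# Illustrative usage of the new helper added in athena/core/file.py.
from athena.core.file import with_content_type

raw = b"col_a,col_b\n1,2\n"

# Bare content is wrapped into a (filename, content, content_type) tuple.
assert with_content_type(file=raw, default_content_type="text/csv") == (None, raw, "text/csv")

# A (filename, content) pair gains the default content type.
assert with_content_type(file=("data.csv", raw), default_content_type="text/csv") == ("data.csv", raw, "text/csv")

# An explicit content type is kept; the default only fills a missing one.
assert with_content_type(file=("data.csv", raw, "application/pdf"), default_content_type="text/csv") == ("data.csv", raw, "application/pdf")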
athena/core/http_client.py CHANGED
@@ -2,6 +2,7 @@
 
 import asyncio
 import email.utils
+import json
 import re
 import time
 import typing
@@ -84,12 +85,13 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float:
 
 
 def _should_retry(response: httpx.Response) -> bool:
-    retriable_400s = [429, 408, 409]
-    return response.status_code >= 500 or response.status_code in retriable_400s
+    retryable_400s = [429, 408, 409]
+    return response.status_code >= 500 or response.status_code in retryable_400s
 
 
 def remove_omit_from_dict(
-    original: typing.Dict[str, typing.Optional[typing.Any]], omit: typing.Optional[typing.Any]
+    original: typing.Dict[str, typing.Optional[typing.Any]],
+    omit: typing.Optional[typing.Any],
 ) -> typing.Dict[str, typing.Any]:
     if omit is None:
         return original
@@ -106,14 +108,18 @@ def maybe_filter_request_body(
     omit: typing.Optional[typing.Any],
 ) -> typing.Optional[typing.Any]:
     if data is None:
-        return None
+        return (
+            jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
+            if request_options is not None
+            else None
+        )
     elif not isinstance(data, typing.Mapping):
         data_content = jsonable_encoder(data)
     else:
         data_content = {
             **(jsonable_encoder(remove_omit_from_dict(data, omit))),  # type: ignore
             **(
-                jsonable_encoder(request_options.get("additional_body_parameters", {}))
+                jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
                 if request_options is not None
                 else {}
             ),
@@ -121,14 +127,34 @@
     return data_content
 
 
+# Abstracted out for testing purposes
+def get_request_body(
+    *,
+    json: typing.Optional[typing.Any],
+    data: typing.Optional[typing.Any],
+    request_options: typing.Optional[RequestOptions],
+    omit: typing.Optional[typing.Any],
+) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]:
+    json_body = None
+    data_body = None
+    if data is not None:
+        data_body = maybe_filter_request_body(data, request_options, omit)
+    else:
+        # If both data and json are None, we send json data in the event extra properties are specified
+        json_body = maybe_filter_request_body(json, request_options, omit)
+
+    # If you have an empty JSON body, you should just send None
+    return (json_body if json_body != {} else None), data_body if data_body != {} else None
+
+
 class HttpClient:
     def __init__(
         self,
         *,
         httpx_client: httpx.Client,
-        base_timeout: typing.Optional[float],
-        base_headers: typing.Dict[str, str],
-        base_url: typing.Optional[str] = None,
+        base_timeout: typing.Callable[[], typing.Optional[float]],
+        base_headers: typing.Callable[[], typing.Dict[str, str]],
+        base_url: typing.Optional[typing.Callable[[], str]] = None,
    ):
        self.base_url = base_url
        self.base_timeout = base_timeout
@@ -136,7 +162,10 @@ class HttpClient:
        self.httpx_client = httpx_client
 
    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
-        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        base_url = maybe_base_url
+        if self.base_url is not None and base_url is None:
+            base_url = self.base_url()
+
        if base_url is None:
            raise ValueError("A base_url is required to make this request, please provide one and try again.")
        return base_url
@@ -154,25 +183,27 @@ class HttpClient:
        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 0,
+        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
    ) -> httpx.Response:
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
        )
 
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
        response = self.httpx_client.request(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
-                        **self.base_headers,
+                        **self.base_headers(),
                        **(headers if headers is not None else {}),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
@@ -183,7 +214,7 @@ class HttpClient:
                        {
                            **(params if params is not None else {}),
                            **(
-                                request_options.get("additional_query_parameters", {})
+                                request_options.get("additional_query_parameters", {}) or {}
                                if request_options is not None
                                else {}
                            ),
@@ -193,10 +224,14 @@ class HttpClient:
                    )
                )
            ),
-            json=maybe_filter_request_body(json, request_options, omit),
-            data=maybe_filter_request_body(data, request_options, omit),
+            json=json_body,
+            data=data_body,
            content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=(
+                convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+                if (files is not None and files is not omit)
+                else None
+            ),
            timeout=timeout,
        )
 
@@ -234,23 +269,25 @@ class HttpClient:
        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 0,
+        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
    ) -> typing.Iterator[httpx.Response]:
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
        )
 
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
        with self.httpx_client.stream(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
-                        **self.base_headers,
+                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
@@ -273,10 +310,14 @@ class HttpClient:
                    )
                )
            ),
-            json=maybe_filter_request_body(json, request_options, omit),
-            data=maybe_filter_request_body(data, request_options, omit),
+            json=json_body,
+            data=data_body,
            content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=(
+                convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+                if (files is not None and files is not omit)
+                else None
+            ),
            timeout=timeout,
        ) as stream:
            yield stream
@@ -287,9 +328,9 @@ class AsyncHttpClient:
        self,
        *,
        httpx_client: httpx.AsyncClient,
-        base_timeout: typing.Optional[float],
-        base_headers: typing.Dict[str, str],
-        base_url: typing.Optional[str] = None,
+        base_timeout: typing.Callable[[], typing.Optional[float]],
+        base_headers: typing.Callable[[], typing.Dict[str, str]],
+        base_url: typing.Optional[typing.Callable[[], str]] = None,
    ):
        self.base_url = base_url
        self.base_timeout = base_timeout
@@ -297,7 +338,10 @@ class AsyncHttpClient:
        self.httpx_client = httpx_client
 
    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
-        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        base_url = maybe_base_url
+        if self.base_url is not None and base_url is None:
+            base_url = self.base_url()
+
        if base_url is None:
            raise ValueError("A base_url is required to make this request, please provide one and try again.")
        return base_url
@@ -315,16 +359,18 @@ class AsyncHttpClient:
        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 0,
+        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
    ) -> httpx.Response:
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
        )
 
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
        # Add the input to each of these and do None-safety checks
        response = await self.httpx_client.request(
            method=method,
@@ -332,9 +378,9 @@ class AsyncHttpClient:
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
-                        **self.base_headers,
+                        **self.base_headers(),
                        **(headers if headers is not None else {}),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                        **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                    }
                )
            ),
@@ -345,7 +391,7 @@ class AsyncHttpClient:
                        {
                            **(params if params is not None else {}),
                            **(
-                                request_options.get("additional_query_parameters", {})
+                                request_options.get("additional_query_parameters", {}) or {}
                                if request_options is not None
                                else {}
                            ),
@@ -355,10 +401,14 @@ class AsyncHttpClient:
                    )
                )
            ),
-            json=maybe_filter_request_body(json, request_options, omit),
-            data=maybe_filter_request_body(data, request_options, omit),
+            json=json_body,
+            data=data_body,
            content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=(
+                convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+                if files is not None
+                else None
+            ),
            timeout=timeout,
        )
 
@@ -395,23 +445,25 @@ class AsyncHttpClient:
        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
        request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 0,
+        retries: int = 2,
        omit: typing.Optional[typing.Any] = None,
    ) -> typing.AsyncIterator[httpx.Response]:
        base_url = self.get_base_url(base_url)
        timeout = (
            request_options.get("timeout_in_seconds")
            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self.base_timeout
+            else self.base_timeout()
        )
 
+        json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
        async with self.httpx_client.stream(
            method=method,
            url=urllib.parse.urljoin(f"{base_url}/", path),
            headers=jsonable_encoder(
                remove_none_from_dict(
                    {
-                        **self.base_headers,
+                        **self.base_headers(),
                        **(headers if headers is not None else {}),
                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                    }
@@ -434,10 +486,14 @@ class AsyncHttpClient:
                    )
                )
            ),
-            json=maybe_filter_request_body(json, request_options, omit),
-            data=maybe_filter_request_body(data, request_options, omit),
+            json=json_body,
+            data=data_body,
            content=content,
-            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            files=(
+                convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+                if files is not None
+                else None
+            ),
            timeout=timeout,
        ) as stream:
            yield stream
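Two behavioral changes in http_client.py are worth calling out. First, the default retries on every request method moves from 0 to 2, so responses matching _should_retry (5xx, 408, 409, 429) are now retried by default. Second, body construction is centralized in get_request_body, which merges additional_body_parameters from the request options even when no explicit body is given and collapses an empty JSON body to None. Based on the code above, a few illustrative calls (the payloads are made up):

# Illustrative only; exercises the get_request_body logic shown above.
from athena.core.http_client import get_request_body

# No body, no request options: nothing is sent on either channel.
assert get_request_body(json=None, data=None, request_options=None, omit=None) == (None, None)

# A JSON payload is returned as the json body; the form-data body stays None.
assert get_request_body(json={"query": "select 1"}, data=None, request_options=None, omit=None) == ({"query": "select 1"}, None)

# additional_body_parameters are merged in even when json is None.
opts = {"additional_body_parameters": {"trace": True}}
assert get_request_body(json=None, data=None, request_options=opts, omit=None) == ({"trace": True}, None)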
athena/core/jsonable_encoder.py CHANGED
@@ -8,33 +8,27 @@ Taken from FastAPI, and made a bit simpler
 https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py
 """
 
+import base64
 import dataclasses
 import datetime as dt
-from collections import defaultdict
 from enum import Enum
 from pathlib import PurePath
 from types import GeneratorType
-from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Callable, Dict, List, Optional, Set, Union
+
+import pydantic
 
 from .datetime_utils import serialize_datetime
-from .pydantic_utilities import pydantic_v1
+from .pydantic_utilities import (
+    IS_PYDANTIC_V2,
+    encode_by_type,
+    to_jsonable_with_fallback,
+)
 
 SetIntStr = Set[Union[int, str]]
 DictIntStrAny = Dict[Union[int, str], Any]
 
 
-def generate_encoders_by_class_tuples(
-    type_encoder_map: Dict[Any, Callable[[Any], Any]]
-) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
-    encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
-    for type_, encoder in type_encoder_map.items():
-        encoders_by_class_tuples[encoder] += (type_,)
-    return encoders_by_class_tuples
-
-
-encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE)
-
-
 def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
     custom_encoder = custom_encoder or {}
     if custom_encoder:
@@ -44,17 +38,24 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any]
             for encoder_type, encoder_instance in custom_encoder.items():
                 if isinstance(obj, encoder_type):
                     return encoder_instance(obj)
-    if isinstance(obj, pydantic_v1.BaseModel):
-        encoder = getattr(obj.__config__, "json_encoders", {})
+    if isinstance(obj, pydantic.BaseModel):
+        if IS_PYDANTIC_V2:
+            encoder = getattr(obj.model_config, "json_encoders", {})  # type: ignore # Pydantic v2
+        else:
+            encoder = getattr(obj.__config__, "json_encoders", {})  # type: ignore # Pydantic v1
         if custom_encoder:
             encoder.update(custom_encoder)
         obj_dict = obj.dict(by_alias=True)
         if "__root__" in obj_dict:
             obj_dict = obj_dict["__root__"]
+        if "root" in obj_dict:
+            obj_dict = obj_dict["root"]
         return jsonable_encoder(obj_dict, custom_encoder=encoder)
     if dataclasses.is_dataclass(obj):
-        obj_dict = dataclasses.asdict(obj)
+        obj_dict = dataclasses.asdict(obj)  # type: ignore
         return jsonable_encoder(obj_dict, custom_encoder=custom_encoder)
+    if isinstance(obj, bytes):
+        return base64.b64encode(obj).decode("utf-8")
     if isinstance(obj, Enum):
         return obj.value
     if isinstance(obj, PurePath):
@@ -80,20 +81,21 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any]
             encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder))
         return encoded_list
 
-    if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE:
-        return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj)
-    for encoder, classes_tuple in encoders_by_class_tuples.items():
-        if isinstance(obj, classes_tuple):
-            return encoder(obj)
+    def fallback_serializer(o: Any) -> Any:
+        attempt_encode = encode_by_type(o)
+        if attempt_encode is not None:
+            return attempt_encode
 
-    try:
-        data = dict(obj)
-    except Exception as e:
-        errors: List[Exception] = []
-        errors.append(e)
         try:
-            data = vars(obj)
+            data = dict(o)
         except Exception as e:
+            errors: List[Exception] = []
             errors.append(e)
-            raise ValueError(errors) from e
-    return jsonable_encoder(data, custom_encoder=custom_encoder)
+            try:
+                data = vars(o)
+            except Exception as e:
+                errors.append(e)
+                raise ValueError(errors) from e
+        return jsonable_encoder(data, custom_encoder=custom_encoder)
+
+    return to_jsonable_with_fallback(obj, fallback_serializer)
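The encoder now branches on IS_PYDANTIC_V2 when looking up json_encoders, unwraps both the __root__ (Pydantic v1) and root (v2) keys, and gains a dedicated bytes branch that base64-encodes binary values. The final fallback path is handed to to_jsonable_with_fallback from pydantic_utilities (not shown in this diff), with the old dict()/vars() behavior preserved inside fallback_serializer. A small illustration of the new bytes handling (the payload is made up):

# Illustrative only; shows the bytes branch added above.
import base64
from athena.core.jsonable_encoder import jsonable_encoder

payload = {"name": "report.pdf", "blob": b"%PDF-1.7"}
encoded = jsonable_encoder(payload)

assert encoded["name"] == "report.pdf"
# bytes values are now emitted as base64 strings.
assert encoded["blob"] == base64.b64encode(b"%PDF-1.7").decode("utf-8")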