methodsdk-0.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. method_security/__init__.py +67 -0
  2. method_security/client.py +137 -0
  3. method_security/common/__init__.py +32 -0
  4. method_security/common/types/__init__.py +32 -0
  5. method_security/common/types/environment_id.py +3 -0
  6. method_security/core/__init__.py +103 -0
  7. method_security/core/api_error.py +23 -0
  8. method_security/core/client_wrapper.py +74 -0
  9. method_security/core/datetime_utils.py +28 -0
  10. method_security/core/file.py +67 -0
  11. method_security/core/force_multipart.py +18 -0
  12. method_security/core/http_client.py +543 -0
  13. method_security/core/http_response.py +55 -0
  14. method_security/core/jsonable_encoder.py +100 -0
  15. method_security/core/pydantic_utilities.py +258 -0
  16. method_security/core/query_encoder.py +58 -0
  17. method_security/core/remove_none_from_dict.py +11 -0
  18. method_security/core/request_options.py +35 -0
  19. method_security/core/serialization.py +276 -0
  20. method_security/issues/__init__.py +40 -0
  21. method_security/issues/client.py +107 -0
  22. method_security/issues/errors/__init__.py +32 -0
  23. method_security/issues/errors/issue_does_not_exist_error.py +11 -0
  24. method_security/issues/raw_client.py +118 -0
  25. method_security/issues/types/__init__.py +42 -0
  26. method_security/issues/types/issue.py +36 -0
  27. method_security/issues/types/issue_closed_reason.py +5 -0
  28. method_security/issues/types/issue_id.py +3 -0
  29. method_security/issues/types/issue_severity.py +5 -0
  30. method_security/issues/types/issue_status.py +5 -0
  31. method_security/objects/__init__.py +32 -0
  32. method_security/objects/types/__init__.py +32 -0
  33. method_security/objects/types/object_id.py +3 -0
  34. method_security/py.typed +0 -0
  35. method_security/version.py +3 -0
  36. methodsdk-0.0.3.dist-info/METADATA +182 -0
  37. methodsdk-0.0.3.dist-info/RECORD +38 -0
  38. methodsdk-0.0.3.dist-info/WHEEL +4 -0
method_security/core/http_client.py
@@ -0,0 +1,543 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import asyncio
+ import email.utils
+ import re
+ import time
+ import typing
+ import urllib.parse
+ from contextlib import asynccontextmanager, contextmanager
+ from random import random
+
+ import httpx
+ from .file import File, convert_file_dict_to_httpx_tuples
+ from .force_multipart import FORCE_MULTIPART
+ from .jsonable_encoder import jsonable_encoder
+ from .query_encoder import encode_query
+ from .remove_none_from_dict import remove_none_from_dict
+ from .request_options import RequestOptions
+ from httpx._types import RequestFiles
+
+ INITIAL_RETRY_DELAY_SECONDS = 0.5
+ MAX_RETRY_DELAY_SECONDS = 10
+ MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30
+
+
+ def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
+     """
+     This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait.
+
+     Inspired by the urllib3 retry implementation.
+     """
+     retry_after_ms = response_headers.get("retry-after-ms")
+     if retry_after_ms is not None:
+         try:
+             return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0
+         except Exception:
+             pass
+
+     retry_after = response_headers.get("retry-after")
+     if retry_after is None:
+         return None
+
+     # Attempt to parse the header as an int.
+     if re.match(r"^\s*[0-9]+\s*$", retry_after):
+         seconds = float(retry_after)
+     # Fallback to parsing it as a date.
+     else:
+         retry_date_tuple = email.utils.parsedate_tz(retry_after)
+         if retry_date_tuple is None:
+             return None
+         if retry_date_tuple[9] is None:  # Python 2
+             # Assume UTC if no timezone was specified
+             # On Python2.7, parsedate_tz returns None for a timezone offset
+             # instead of 0 if no timezone is given, where mktime_tz treats
+             # a None timezone offset as local time.
+             retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+         retry_date = email.utils.mktime_tz(retry_date_tuple)
+         seconds = retry_date - time.time()
+
+     if seconds < 0:
+         seconds = 0
+
+     return seconds
+
+
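For reference, a quick sketch (not part of the package) of how `_parse_retry_after` treats the two header forms. The header values are made up for illustration, and the import reaches into an internal helper of the module above:

import httpx
from method_security.core.http_client import _parse_retry_after

numeric = httpx.Headers({"retry-after": "7"})
http_date = httpx.Headers({"retry-after": "Wed, 21 Oct 2015 07:28:00 GMT"})

print(_parse_retry_after(numeric))            # 7.0 -- parsed as plain seconds
print(_parse_retry_after(http_date))          # seconds until that date, or 0 if it is already in the past
print(_parse_retry_after(httpx.Headers({})))  # None -- no header, caller falls back to backoff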
+ def _retry_timeout(response: httpx.Response, retries: int) -> float:
+     """
+     Determine the amount of time to wait before retrying a request.
+     This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff
+     with a jitter to determine the number of seconds to wait.
+     """
+
+     # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
+     retry_after = _parse_retry_after(response.headers)
+     if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER:
+         return retry_after
+
+     # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
+     retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
+
+     # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
+     timeout = retry_delay * (1 - 0.25 * random())
+     return timeout if timeout >= 0 else 0
+
+
+ def _should_retry(response: httpx.Response) -> bool:
+     retryable_400s = [429, 408, 409]
+     return response.status_code >= 500 or response.status_code in retryable_400s
+
+
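The backoff schedule implied by the constants above, assuming no usable Retry-After header: the pre-jitter delay for retry attempt n is min(0.5 * 2**n, 10) seconds, and jitter then scales it by a random factor in [0.75, 1.0]. A tiny illustration of the arithmetic:

# Illustrative arithmetic only, using the values of the constants defined above (0.5s initial, 10s cap).
for attempt in range(6):
    base = min(0.5 * 2.0**attempt, 10.0)
    print(attempt, base)  # 0.5, 1.0, 2.0, 4.0, 8.0, then capped at 10.0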
+ def remove_omit_from_dict(
+     original: typing.Dict[str, typing.Optional[typing.Any]],
+     omit: typing.Optional[typing.Any],
+ ) -> typing.Dict[str, typing.Any]:
+     if omit is None:
+         return original
+     new: typing.Dict[str, typing.Any] = {}
+     for key, value in original.items():
+         if value is not omit:
+             new[key] = value
+     return new
+
+
+ def maybe_filter_request_body(
+     data: typing.Optional[typing.Any],
+     request_options: typing.Optional[RequestOptions],
+     omit: typing.Optional[typing.Any],
+ ) -> typing.Optional[typing.Any]:
+     if data is None:
+         return (
+             jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
+             if request_options is not None
+             else None
+         )
+     elif not isinstance(data, typing.Mapping):
+         data_content = jsonable_encoder(data)
+     else:
+         data_content = {
+             **(jsonable_encoder(remove_omit_from_dict(data, omit))),  # type: ignore
+             **(
+                 jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
+                 if request_options is not None
+                 else {}
+             ),
+         }
+     return data_content
+
+
+ # Abstracted out for testing purposes
+ def get_request_body(
+     *,
+     json: typing.Optional[typing.Any],
+     data: typing.Optional[typing.Any],
+     request_options: typing.Optional[RequestOptions],
+     omit: typing.Optional[typing.Any],
+ ) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]:
+     json_body = None
+     data_body = None
+     if data is not None:
+         data_body = maybe_filter_request_body(data, request_options, omit)
+     else:
+         # If both data and json are None, we send json data in the event extra properties are specified
+         json_body = maybe_filter_request_body(json, request_options, omit)
+
+     # If you have an empty JSON body, you should just send None
+     return (json_body if json_body != {} else None), data_body if data_body != {} else None
+
+
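A small sketch (illustrative only, using a stand-in OMIT sentinel rather than whatever sentinel the SDK defines) of how get_request_body drops omitted keys and collapses an empty JSON body to None:

from method_security.core.http_client import get_request_body

OMIT = object()  # stand-in sentinel, for illustration only

json_body, data_body = get_request_body(
    json={"name": "alice", "nickname": OMIT},  # OMIT-valued keys are dropped
    data=None,
    request_options=None,
    omit=OMIT,
)
print(json_body)  # {'name': 'alice'}
print(data_body)  # None

# An empty JSON body collapses to None rather than sending "{}".
print(get_request_body(json={}, data=None, request_options=None, omit=None))  # (None, None)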
+ class HttpClient:
+     def __init__(
+         self,
+         *,
+         httpx_client: httpx.Client,
+         base_timeout: typing.Callable[[], typing.Optional[float]],
+         base_headers: typing.Callable[[], typing.Dict[str, str]],
+         base_url: typing.Optional[typing.Callable[[], str]] = None,
+     ):
+         self.base_url = base_url
+         self.base_timeout = base_timeout
+         self.base_headers = base_headers
+         self.httpx_client = httpx_client
+
+     def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+         base_url = maybe_base_url
+         if self.base_url is not None and base_url is None:
+             base_url = self.base_url()
+
+         if base_url is None:
+             raise ValueError("A base_url is required to make this request, please provide one and try again.")
+         return base_url
+
+     def request(
+         self,
+         path: typing.Optional[str] = None,
+         *,
+         method: str,
+         base_url: typing.Optional[str] = None,
+         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         json: typing.Optional[typing.Any] = None,
+         data: typing.Optional[typing.Any] = None,
+         content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+         files: typing.Optional[
+             typing.Union[
+                 typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                 typing.List[typing.Tuple[str, File]],
+             ]
+         ] = None,
+         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         request_options: typing.Optional[RequestOptions] = None,
+         retries: int = 2,
+         omit: typing.Optional[typing.Any] = None,
+         force_multipart: typing.Optional[bool] = None,
+     ) -> httpx.Response:
+         base_url = self.get_base_url(base_url)
+         timeout = (
+             request_options.get("timeout_in_seconds")
+             if request_options is not None and request_options.get("timeout_in_seconds") is not None
+             else self.base_timeout()
+         )
+
+         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
+         request_files: typing.Optional[RequestFiles] = (
+             convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+             if (files is not None and files is not omit and isinstance(files, dict))
+             else None
+         )
+
+         if (request_files is None or len(request_files) == 0) and force_multipart:
+             request_files = FORCE_MULTIPART
+
+         response = self.httpx_client.request(
+             method=method,
+             url=urllib.parse.urljoin(f"{base_url}/", path),
+             headers=jsonable_encoder(
+                 remove_none_from_dict(
+                     {
+                         **self.base_headers(),
+                         **(headers if headers is not None else {}),
+                         **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+                     }
+                 )
+             ),
+             params=encode_query(
+                 jsonable_encoder(
+                     remove_none_from_dict(
+                         remove_omit_from_dict(
+                             {
+                                 **(params if params is not None else {}),
+                                 **(
+                                     request_options.get("additional_query_parameters", {}) or {}
+                                     if request_options is not None
+                                     else {}
+                                 ),
+                             },
+                             omit,
+                         )
+                     )
+                 )
+             ),
+             json=json_body,
+             data=data_body,
+             content=content,
+             files=request_files,
+             timeout=timeout,
+         )
+
+         max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+         if _should_retry(response=response):
+             if max_retries > retries:
+                 time.sleep(_retry_timeout(response=response, retries=retries))
+                 return self.request(
+                     path=path,
+                     method=method,
+                     base_url=base_url,
+                     params=params,
+                     json=json,
+                     content=content,
+                     files=files,
+                     headers=headers,
+                     request_options=request_options,
+                     retries=retries + 1,
+                     omit=omit,
+                 )
+
+         return response
+
+     @contextmanager
+     def stream(
+         self,
+         path: typing.Optional[str] = None,
+         *,
+         method: str,
+         base_url: typing.Optional[str] = None,
+         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         json: typing.Optional[typing.Any] = None,
+         data: typing.Optional[typing.Any] = None,
+         content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+         files: typing.Optional[
+             typing.Union[
+                 typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                 typing.List[typing.Tuple[str, File]],
+             ]
+         ] = None,
+         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         request_options: typing.Optional[RequestOptions] = None,
+         retries: int = 2,
+         omit: typing.Optional[typing.Any] = None,
+         force_multipart: typing.Optional[bool] = None,
+     ) -> typing.Iterator[httpx.Response]:
+         base_url = self.get_base_url(base_url)
+         timeout = (
+             request_options.get("timeout_in_seconds")
+             if request_options is not None and request_options.get("timeout_in_seconds") is not None
+             else self.base_timeout()
+         )
+
+         request_files: typing.Optional[RequestFiles] = (
+             convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+             if (files is not None and files is not omit and isinstance(files, dict))
+             else None
+         )
+
+         if (request_files is None or len(request_files) == 0) and force_multipart:
+             request_files = FORCE_MULTIPART
+
+         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
+         with self.httpx_client.stream(
+             method=method,
+             url=urllib.parse.urljoin(f"{base_url}/", path),
+             headers=jsonable_encoder(
+                 remove_none_from_dict(
+                     {
+                         **self.base_headers(),
+                         **(headers if headers is not None else {}),
+                         **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                     }
+                 )
+             ),
+             params=encode_query(
+                 jsonable_encoder(
+                     remove_none_from_dict(
+                         remove_omit_from_dict(
+                             {
+                                 **(params if params is not None else {}),
+                                 **(
+                                     request_options.get("additional_query_parameters", {})
+                                     if request_options is not None
+                                     else {}
+                                 ),
+                             },
+                             omit,
+                         )
+                     )
+                 )
+             ),
+             json=json_body,
+             data=data_body,
+             content=content,
+             files=request_files,
+             timeout=timeout,
+         ) as stream:
+             yield stream
+
+
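A minimal usage sketch of HttpClient, assuming a hypothetical base URL, token, and endpoint; in practice the generated client.py wires this up for you:

import httpx
from method_security.core.http_client import HttpClient

client = HttpClient(
    httpx_client=httpx.Client(),
    base_timeout=lambda: 60.0,
    base_headers=lambda: {"Authorization": "Bearer <token>"},   # placeholder token
    base_url=lambda: "https://api.example.com",                 # placeholder URL
)

response = client.request(
    "issues",                              # joined onto the base URL
    method="GET",
    params={"status": "open"},
    request_options={"max_retries": 2},    # enables the retry/backoff path above
)
print(response.status_code)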
+ class AsyncHttpClient:
+     def __init__(
+         self,
+         *,
+         httpx_client: httpx.AsyncClient,
+         base_timeout: typing.Callable[[], typing.Optional[float]],
+         base_headers: typing.Callable[[], typing.Dict[str, str]],
+         base_url: typing.Optional[typing.Callable[[], str]] = None,
+     ):
+         self.base_url = base_url
+         self.base_timeout = base_timeout
+         self.base_headers = base_headers
+         self.httpx_client = httpx_client
+
+     def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+         base_url = maybe_base_url
+         if self.base_url is not None and base_url is None:
+             base_url = self.base_url()
+
+         if base_url is None:
+             raise ValueError("A base_url is required to make this request, please provide one and try again.")
+         return base_url
+
+     async def request(
+         self,
+         path: typing.Optional[str] = None,
+         *,
+         method: str,
+         base_url: typing.Optional[str] = None,
+         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         json: typing.Optional[typing.Any] = None,
+         data: typing.Optional[typing.Any] = None,
+         content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+         files: typing.Optional[
+             typing.Union[
+                 typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                 typing.List[typing.Tuple[str, File]],
+             ]
+         ] = None,
+         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         request_options: typing.Optional[RequestOptions] = None,
+         retries: int = 2,
+         omit: typing.Optional[typing.Any] = None,
+         force_multipart: typing.Optional[bool] = None,
+     ) -> httpx.Response:
+         base_url = self.get_base_url(base_url)
+         timeout = (
+             request_options.get("timeout_in_seconds")
+             if request_options is not None and request_options.get("timeout_in_seconds") is not None
+             else self.base_timeout()
+         )
+
+         request_files: typing.Optional[RequestFiles] = (
+             convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+             if (files is not None and files is not omit and isinstance(files, dict))
+             else None
+         )
+
+         if (request_files is None or len(request_files) == 0) and force_multipart:
+             request_files = FORCE_MULTIPART
+
+         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
+         # Add the input to each of these and do None-safety checks
+         response = await self.httpx_client.request(
+             method=method,
+             url=urllib.parse.urljoin(f"{base_url}/", path),
+             headers=jsonable_encoder(
+                 remove_none_from_dict(
+                     {
+                         **self.base_headers(),
+                         **(headers if headers is not None else {}),
+                         **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+                     }
+                 )
+             ),
+             params=encode_query(
+                 jsonable_encoder(
+                     remove_none_from_dict(
+                         remove_omit_from_dict(
+                             {
+                                 **(params if params is not None else {}),
+                                 **(
+                                     request_options.get("additional_query_parameters", {}) or {}
+                                     if request_options is not None
+                                     else {}
+                                 ),
+                             },
+                             omit,
+                         )
+                     )
+                 )
+             ),
+             json=json_body,
+             data=data_body,
+             content=content,
+             files=request_files,
+             timeout=timeout,
+         )
+
+         max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+         if _should_retry(response=response):
+             if max_retries > retries:
+                 await asyncio.sleep(_retry_timeout(response=response, retries=retries))
+                 return await self.request(
+                     path=path,
+                     method=method,
+                     base_url=base_url,
+                     params=params,
+                     json=json,
+                     content=content,
+                     files=files,
+                     headers=headers,
+                     request_options=request_options,
+                     retries=retries + 1,
+                     omit=omit,
+                 )
+         return response
+
+     @asynccontextmanager
+     async def stream(
+         self,
+         path: typing.Optional[str] = None,
+         *,
+         method: str,
+         base_url: typing.Optional[str] = None,
+         params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         json: typing.Optional[typing.Any] = None,
+         data: typing.Optional[typing.Any] = None,
+         content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+         files: typing.Optional[
+             typing.Union[
+                 typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]],
+                 typing.List[typing.Tuple[str, File]],
+             ]
+         ] = None,
+         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+         request_options: typing.Optional[RequestOptions] = None,
+         retries: int = 2,
+         omit: typing.Optional[typing.Any] = None,
+         force_multipart: typing.Optional[bool] = None,
+     ) -> typing.AsyncIterator[httpx.Response]:
+         base_url = self.get_base_url(base_url)
+         timeout = (
+             request_options.get("timeout_in_seconds")
+             if request_options is not None and request_options.get("timeout_in_seconds") is not None
+             else self.base_timeout()
+         )
+
+         request_files: typing.Optional[RequestFiles] = (
+             convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
+             if (files is not None and files is not omit and isinstance(files, dict))
+             else None
+         )
+
+         if (request_files is None or len(request_files) == 0) and force_multipart:
+             request_files = FORCE_MULTIPART
+
+         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
+
+         async with self.httpx_client.stream(
+             method=method,
+             url=urllib.parse.urljoin(f"{base_url}/", path),
+             headers=jsonable_encoder(
+                 remove_none_from_dict(
+                     {
+                         **self.base_headers(),
+                         **(headers if headers is not None else {}),
+                         **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                     }
+                 )
+             ),
+             params=encode_query(
+                 jsonable_encoder(
+                     remove_none_from_dict(
+                         remove_omit_from_dict(
+                             {
+                                 **(params if params is not None else {}),
+                                 **(
+                                     request_options.get("additional_query_parameters", {})
+                                     if request_options is not None
+                                     else {}
+                                 ),
+                             },
+                             omit=omit,
+                         )
+                     )
+                 )
+             ),
+             json=json_body,
+             data=data_body,
+             content=content,
+             files=request_files,
+             timeout=timeout,
+         ) as stream:
+             yield stream
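The async class mirrors the sync one; note that stream (in both variants) does not apply the retry logic. A hedged sketch of streaming a response, again with placeholder URL and path:

import asyncio
import httpx
from method_security.core.http_client import AsyncHttpClient

async def main() -> None:
    client = AsyncHttpClient(
        httpx_client=httpx.AsyncClient(),
        base_timeout=lambda: 60.0,
        base_headers=lambda: {"Authorization": "Bearer <token>"},  # placeholder token
        base_url=lambda: "https://api.example.com",                # placeholder URL
    )
    async with client.stream("export", method="GET") as response:
        async for chunk in response.aiter_bytes():
            ...  # consume the streamed body incrementally

asyncio.run(main())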
method_security/core/http_response.py
@@ -0,0 +1,55 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from typing import Dict, Generic, TypeVar
+
+ import httpx
+
+ # Generic to represent the underlying type of the data wrapped by the HTTP response.
+ T = TypeVar("T")
+
+
+ class BaseHttpResponse:
+     """Minimalist HTTP response wrapper that exposes response headers."""
+
+     _response: httpx.Response
+
+     def __init__(self, response: httpx.Response):
+         self._response = response
+
+     @property
+     def headers(self) -> Dict[str, str]:
+         return dict(self._response.headers)
+
+
+ class HttpResponse(Generic[T], BaseHttpResponse):
+     """HTTP response wrapper that exposes response headers and data."""
+
+     _data: T
+
+     def __init__(self, response: httpx.Response, data: T):
+         super().__init__(response)
+         self._data = data
+
+     @property
+     def data(self) -> T:
+         return self._data
+
+     def close(self) -> None:
+         self._response.close()
+
+
+ class AsyncHttpResponse(Generic[T], BaseHttpResponse):
+     """HTTP response wrapper that exposes response headers and data."""
+
+     _data: T
+
+     def __init__(self, response: httpx.Response, data: T):
+         super().__init__(response)
+         self._data = data
+
+     @property
+     def data(self) -> T:
+         return self._data
+
+     async def close(self) -> None:
+         await self._response.aclose()
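A short illustration (not from the package) of the wrapper types above; the generated raw clients normally construct these for you, and the header value here is made up:

import httpx
from method_security.core.http_response import HttpResponse

raw = httpx.Response(200, headers={"x-request-id": "abc123"}, json={"ok": True})
wrapped = HttpResponse(response=raw, data={"ok": True})

print(wrapped.data)                     # {'ok': True} -- the parsed payload
print(wrapped.headers["x-request-id"])  # 'abc123'     -- headers from the underlying response
wrapped.close()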
method_security/core/jsonable_encoder.py
@@ -0,0 +1,100 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ """
+ jsonable_encoder converts a Python object to a JSON-friendly dict
+ (e.g. datetimes to strings, Pydantic models to dicts).
+
+ Taken from FastAPI, and made a bit simpler
+ https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py
+ """
+
+ import base64
+ import dataclasses
+ import datetime as dt
+ from enum import Enum
+ from pathlib import PurePath
+ from types import GeneratorType
+ from typing import Any, Callable, Dict, List, Optional, Set, Union
+
+ import pydantic
+ from .datetime_utils import serialize_datetime
+ from .pydantic_utilities import (
+     IS_PYDANTIC_V2,
+     encode_by_type,
+     to_jsonable_with_fallback,
+ )
+
+ SetIntStr = Set[Union[int, str]]
+ DictIntStrAny = Dict[Union[int, str], Any]
+
+
+ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
+     custom_encoder = custom_encoder or {}
+     if custom_encoder:
+         if type(obj) in custom_encoder:
+             return custom_encoder[type(obj)](obj)
+         else:
+             for encoder_type, encoder_instance in custom_encoder.items():
+                 if isinstance(obj, encoder_type):
+                     return encoder_instance(obj)
+     if isinstance(obj, pydantic.BaseModel):
+         if IS_PYDANTIC_V2:
+             encoder = getattr(obj.model_config, "json_encoders", {})  # type: ignore # Pydantic v2
+         else:
+             encoder = getattr(obj.__config__, "json_encoders", {})  # type: ignore # Pydantic v1
+         if custom_encoder:
+             encoder.update(custom_encoder)
+         obj_dict = obj.dict(by_alias=True)
+         if "__root__" in obj_dict:
+             obj_dict = obj_dict["__root__"]
+         if "root" in obj_dict:
+             obj_dict = obj_dict["root"]
+         return jsonable_encoder(obj_dict, custom_encoder=encoder)
+     if dataclasses.is_dataclass(obj):
+         obj_dict = dataclasses.asdict(obj)  # type: ignore
+         return jsonable_encoder(obj_dict, custom_encoder=custom_encoder)
+     if isinstance(obj, bytes):
+         return base64.b64encode(obj).decode("utf-8")
+     if isinstance(obj, Enum):
+         return obj.value
+     if isinstance(obj, PurePath):
+         return str(obj)
+     if isinstance(obj, (str, int, float, type(None))):
+         return obj
+     if isinstance(obj, dt.datetime):
+         return serialize_datetime(obj)
+     if isinstance(obj, dt.date):
+         return str(obj)
+     if isinstance(obj, dict):
+         encoded_dict = {}
+         allowed_keys = set(obj.keys())
+         for key, value in obj.items():
+             if key in allowed_keys:
+                 encoded_key = jsonable_encoder(key, custom_encoder=custom_encoder)
+                 encoded_value = jsonable_encoder(value, custom_encoder=custom_encoder)
+                 encoded_dict[encoded_key] = encoded_value
+         return encoded_dict
+     if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
+         encoded_list = []
+         for item in obj:
+             encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder))
+         return encoded_list
+
+     def fallback_serializer(o: Any) -> Any:
+         attempt_encode = encode_by_type(o)
+         if attempt_encode is not None:
+             return attempt_encode
+
+         try:
+             data = dict(o)
+         except Exception as e:
+             errors: List[Exception] = []
+             errors.append(e)
+             try:
+                 data = vars(o)
+             except Exception as e:
+                 errors.append(e)
+                 raise ValueError(errors) from e
+         return jsonable_encoder(data, custom_encoder=custom_encoder)
+
+     return to_jsonable_with_fallback(obj, fallback_serializer)
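A quick sketch of jsonable_encoder on plain values. Illustrative only: the exact datetime string comes from serialize_datetime (an RFC 3339 timestamp) and set ordering is not guaranteed.

import datetime as dt
from method_security.core.jsonable_encoder import jsonable_encoder

payload = {
    "created_at": dt.datetime(2024, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc),  # datetime -> RFC 3339 string
    "raw": b"bytes",                                                          # bytes -> base64 string
    "tags": {"alpha", "beta"},                                                # set -> list (order may vary)
}
print(jsonable_encoder(payload))
# e.g. {'created_at': '2024-01-01T12:00:00Z', 'raw': 'Ynl0ZXM=', 'tags': ['alpha', 'beta']}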