athena-intelligence 0.1.87__py3-none-any.whl → 0.1.89__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- athena/base_client.py +4 -4
- athena/chain/client.py +30 -138
- athena/client.py +126 -23
- athena/core/client_wrapper.py +13 -3
- athena/core/http_client.py +333 -20
- athena/core/remove_none_from_dict.py +2 -2
- athena/dataset/client.py +8 -64
- athena/message/client.py +20 -113
- athena/query/client.py +8 -54
- athena/report/client.py +10 -66
- athena/search/client.py +22 -68
- athena/snippet/client.py +10 -116
- athena/tools/client.py +112 -542
- athena/upload/client.py +2 -58
- athena/workflow/client.py +8 -54
- {athena_intelligence-0.1.87.dist-info → athena_intelligence-0.1.89.dist-info}/METADATA +1 -1
- {athena_intelligence-0.1.87.dist-info → athena_intelligence-0.1.89.dist-info}/RECORD +18 -18
- {athena_intelligence-0.1.87.dist-info → athena_intelligence-0.1.89.dist-info}/WHEEL +0 -0
athena/core/http_client.py
CHANGED
@@ -5,12 +5,18 @@ import email.utils
 import re
 import time
 import typing
+import urllib.parse
 from contextlib import asynccontextmanager, contextmanager
-from functools import wraps
 from random import random
 
 import httpx
 
+from .file import File, convert_file_dict_to_httpx_tuples
+from .jsonable_encoder import jsonable_encoder
+from .query_encoder import encode_query
+from .remove_none_from_dict import remove_none_from_dict
+from .request_options import RequestOptions
+
 INITIAL_RETRY_DELAY_SECONDS = 0.5
 MAX_RETRY_DELAY_SECONDS = 10
 MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30
@@ -82,49 +88,356 @@ def _should_retry(response: httpx.Response) -> bool:
     return response.status_code >= 500 or response.status_code in retriable_400s
 
 
+def remove_omit_from_dict(
+    original: typing.Dict[str, typing.Optional[typing.Any]], omit: typing.Optional[typing.Any]
+) -> typing.Dict[str, typing.Any]:
+    if omit is None:
+        return original
+    new: typing.Dict[str, typing.Any] = {}
+    for key, value in original.items():
+        if value is not omit:
+            new[key] = value
+    return new
+
+
+def maybe_filter_request_body(
+    data: typing.Optional[typing.Any],
+    request_options: typing.Optional[RequestOptions],
+    omit: typing.Optional[typing.Any],
+) -> typing.Optional[typing.Any]:
+    if data is None:
+        return None
+    elif not isinstance(data, typing.Mapping):
+        data_content = jsonable_encoder(data)
+    else:
+        data_content = {
+            **(jsonable_encoder(remove_omit_from_dict(data, omit))),  # type: ignore
+            **(
+                jsonable_encoder(request_options.get("additional_body_parameters", {}))
+                if request_options is not None
+                else {}
+            ),
+        }
+    return data_content
+
+
 class HttpClient:
-    def __init__(
+    def __init__(
+        self,
+        *,
+        httpx_client: httpx.Client,
+        base_timeout: typing.Optional[float],
+        base_headers: typing.Dict[str, str],
+        base_url: typing.Optional[str] = None,
+    ):
+        self.base_url = base_url
+        self.base_timeout = base_timeout
+        self.base_headers = base_headers
         self.httpx_client = httpx_client
 
-
-
+    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        if base_url is None:
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
+        return base_url
+
     def request(
-        self,
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
     ) -> httpx.Response:
-
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        response = self.httpx_client.request(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        )
+
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
         if _should_retry(response=response):
             if max_retries > retries:
                 time.sleep(_retry_timeout(response=response, retries=retries))
-                return self.request(
+                return self.request(
+                    path=path,
+                    method=method,
+                    base_url=base_url,
+                    params=params,
+                    json=json,
+                    content=content,
+                    files=files,
+                    headers=headers,
+                    request_options=request_options,
+                    retries=retries + 1,
+                    omit=omit,
+                )
+
         return response
 
-    @wraps(httpx.Client.stream)
     @contextmanager
-    def stream(
-
+    def stream(
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> typing.Iterator[httpx.Response]:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        with self.httpx_client.stream(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        ) as stream:
             yield stream
 
 
 class AsyncHttpClient:
-    def __init__(
+    def __init__(
+        self,
+        *,
+        httpx_client: httpx.AsyncClient,
+        base_timeout: typing.Optional[float],
+        base_headers: typing.Dict[str, str],
+        base_url: typing.Optional[str] = None,
+    ):
+        self.base_url = base_url
+        self.base_timeout = base_timeout
+        self.base_headers = base_headers
         self.httpx_client = httpx_client
 
-
-
+    def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
+        base_url = self.base_url if maybe_base_url is None else maybe_base_url
+        if base_url is None:
+            raise ValueError("A base_url is required to make this request, please provide one and try again.")
+        return base_url
+
     async def request(
-        self,
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
     ) -> httpx.Response:
-
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        # Add the input to each of these and do None-safety checks
+        response = await self.httpx_client.request(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        )
+
+        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
         if _should_retry(response=response):
             if max_retries > retries:
                 await asyncio.sleep(_retry_timeout(response=response, retries=retries))
-                return await self.request(
+                return await self.request(
+                    path=path,
+                    method=method,
+                    base_url=base_url,
+                    params=params,
+                    json=json,
+                    content=content,
+                    files=files,
+                    headers=headers,
+                    request_options=request_options,
+                    retries=retries + 1,
+                    omit=omit,
+                )
         return response
 
-    @wraps(httpx.AsyncClient.stream)
     @asynccontextmanager
     async def stream(
-        self,
-
-
+        self,
+        path: typing.Optional[str] = None,
+        *,
+        method: str,
+        base_url: typing.Optional[str] = None,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        json: typing.Optional[typing.Any] = None,
+        data: typing.Optional[typing.Any] = None,
+        content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
+        files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
+        headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+        retries: int = 0,
+        omit: typing.Optional[typing.Any] = None,
+    ) -> typing.AsyncIterator[httpx.Response]:
+        base_url = self.get_base_url(base_url)
+        timeout = (
+            request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self.base_timeout
+        )
+
+        async with self.httpx_client.stream(
+            method=method,
+            url=urllib.parse.urljoin(f"{base_url}/", path),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self.base_headers,
+                        **(headers if headers is not None else {}),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            params=encode_query(
+                jsonable_encoder(
+                    remove_none_from_dict(
+                        remove_omit_from_dict(
+                            {
+                                **(params if params is not None else {}),
+                                **(
+                                    request_options.get("additional_query_parameters", {})
+                                    if request_options is not None
+                                    else {}
+                                ),
+                            },
+                            omit=omit,
+                        )
+                    )
+                )
+            ),
+            json=maybe_filter_request_body(json, request_options, omit),
+            data=maybe_filter_request_body(data, request_options, omit),
+            content=content,
+            files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
+            timeout=timeout,
+        ) as stream:
             yield stream
athena/core/remove_none_from_dict.py
CHANGED
@@ -1,9 +1,9 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Mapping, Optional
 
 
-def remove_none_from_dict(original: Dict[str, Optional[Any]]) -> Dict[str, Any]:
+def remove_none_from_dict(original: Mapping[str, Optional[Any]]) -> Dict[str, Any]:
     new: Dict[str, Any] = {}
     for key, value in original.items():
         if value is not None:
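
Widening the parameter from Dict to Mapping is a typing-only change: runtime behavior is identical, but read-only mappings now satisfy the annotation. A small sketch:

from types import MappingProxyType

from athena.core.remove_none_from_dict import remove_none_from_dict

# A read-only mapping now satisfies the annotated parameter type.
headers = MappingProxyType({"Authorization": "Bearer <token>", "X-Trace-Id": None})

# None-valued entries are dropped; the result is always a fresh plain dict.
assert remove_none_from_dict(headers) == {"Authorization": "Bearer <token>"}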
athena/dataset/client.py
CHANGED
@@ -1,15 +1,11 @@
 # This file was auto-generated by Fern from our API Definition.
 
 import typing
-import urllib.parse
 from json.decoder import JSONDecodeError
 
 from ..core.api_error import ApiError
 from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
-from ..core.jsonable_encoder import jsonable_encoder
 from ..core.pydantic_utilities import pydantic_v1
-from ..core.query_encoder import encode_query
-from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.get_datasets_response import GetDatasetsResponse
@@ -25,7 +21,7 @@ class DatasetClient:
         *,
         page: typing.Optional[int] = None,
         page_size: typing.Optional[int] = None,
-        request_options: typing.Optional[RequestOptions] = None
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> GetDatasetsResponse:
         """
         Parameters
@@ -54,36 +50,10 @@ class DatasetClient:
         client.dataset.get()
         """
         _response = self._client_wrapper.httpx_client.request(
+            "api/v0/datasets",
             method="GET",
-            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/datasets"),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        {
-                            "page": page,
-                            "page_size": page_size,
-                            **(
-                                request_options.get("additional_query_parameters", {})
-                                if request_options is not None
-                                else {}
-                            ),
-                        }
-                    )
-                )
-            ),
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self._client_wrapper.get_timeout(),
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+            params={"page": page, "page_size": page_size},
+            request_options=request_options,
         )
         if 200 <= _response.status_code < 300:
             return pydantic_v1.parse_obj_as(GetDatasetsResponse, _response.json())  # type: ignore
@@ -107,7 +77,7 @@ class AsyncDatasetClient:
         *,
         page: typing.Optional[int] = None,
         page_size: typing.Optional[int] = None,
-        request_options: typing.Optional[RequestOptions] = None
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> GetDatasetsResponse:
         """
         Parameters
@@ -136,36 +106,10 @@ class AsyncDatasetClient:
         await client.dataset.get()
         """
         _response = await self._client_wrapper.httpx_client.request(
+            "api/v0/datasets",
             method="GET",
-            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/datasets"),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        {
-                            "page": page,
-                            "page_size": page_size,
-                            **(
-                                request_options.get("additional_query_parameters", {})
-                                if request_options is not None
-                                else {}
-                            ),
-                        }
-                    )
-                )
-            ),
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else self._client_wrapper.get_timeout(),
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+            params={"page": page, "page_size": page_size},
+            request_options=request_options,
        )
         if 200 <= _response.status_code < 300:
             return pydantic_v1.parse_obj_as(GetDatasetsResponse, _response.json())  # type: ignore