cribl-control-plane 0.0.18__py3-none-any.whl → 0.0.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane has been flagged as possibly problematic. See the release advisory for details.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/distributed.py +187 -0
- cribl_control_plane/errors/healthstatus_error.py +1 -1
- cribl_control_plane/groups_sdk.py +1291 -0
- cribl_control_plane/lake.py +479 -0
- cribl_control_plane/models/__init__.py +602 -3
- cribl_control_plane/models/appmode.py +13 -0
- cribl_control_plane/models/cacheconnection.py +44 -0
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +12 -0
- cribl_control_plane/models/cloudprovider.py +9 -0
- cribl_control_plane/models/commit.py +30 -0
- cribl_control_plane/models/configgroup.py +116 -0
- cribl_control_plane/models/configgroupcloud.py +48 -0
- cribl_control_plane/models/configgrouplookups.py +34 -0
- cribl_control_plane/models/createcribllakedatasetbylakeidop.py +48 -0
- cribl_control_plane/models/createpacksop.py +24 -0
- cribl_control_plane/models/createproductsgroupsbyproductop.py +54 -0
- cribl_control_plane/models/cribllakedataset.py +74 -0
- cribl_control_plane/models/datasetmetadata.py +39 -0
- cribl_control_plane/models/datasetmetadataruninfo.py +28 -0
- cribl_control_plane/models/deployrequest.py +18 -0
- cribl_control_plane/models/deployrequestlookups.py +28 -0
- cribl_control_plane/models/distributedsummary.py +63 -0
- cribl_control_plane/models/getcribllakedatasetbylakeidop.py +40 -0
- cribl_control_plane/models/getgroupsaclbyidop.py +63 -0
- cribl_control_plane/models/getgroupsbyidop.py +49 -0
- cribl_control_plane/models/getgroupsconfigversionbyidop.py +36 -0
- cribl_control_plane/models/getpacksop.py +40 -0
- cribl_control_plane/models/getproductsgroupsaclteamsbyproductandidop.py +78 -0
- cribl_control_plane/models/getproductsgroupsbyproductop.py +58 -0
- cribl_control_plane/models/getsummaryop.py +46 -0
- cribl_control_plane/models/getsummaryworkersop.py +39 -0
- cribl_control_plane/models/getworkersop.py +82 -0
- cribl_control_plane/models/hbcriblinfo.py +80 -0
- cribl_control_plane/models/hbleaderinfo.py +23 -0
- cribl_control_plane/models/healthstatus.py +3 -3
- cribl_control_plane/models/heartbeatmetadata.py +122 -0
- cribl_control_plane/models/lakedatasetsearchconfig.py +18 -0
- cribl_control_plane/models/lakehouseconnectiontype.py +9 -0
- cribl_control_plane/models/lookupversions.py +13 -0
- cribl_control_plane/models/masterworkerentry.py +84 -0
- cribl_control_plane/models/nodeactiveupgradestatus.py +10 -0
- cribl_control_plane/models/nodefailedupgradestatus.py +9 -0
- cribl_control_plane/models/nodeprovidedinfo.py +184 -0
- cribl_control_plane/models/nodeskippedupgradestatus.py +11 -0
- cribl_control_plane/models/nodeupgradestate.py +11 -0
- cribl_control_plane/models/nodeupgradestatus.py +30 -0
- cribl_control_plane/models/packinfo.py +73 -0
- cribl_control_plane/models/packinstallinfo.py +76 -0
- cribl_control_plane/models/packrequestbody.py +75 -0
- cribl_control_plane/models/rbacresource.py +14 -0
- cribl_control_plane/models/resourcepolicy.py +24 -0
- cribl_control_plane/models/restartresponse.py +26 -0
- cribl_control_plane/models/teamaccesscontrollist.py +18 -0
- cribl_control_plane/models/updategroupsdeploybyidop.py +46 -0
- cribl_control_plane/models/updatepacksop.py +37 -0
- cribl_control_plane/models/updateworkersrestartop.py +24 -0
- cribl_control_plane/models/useraccesscontrollist.py +18 -0
- cribl_control_plane/packs.py +623 -0
- cribl_control_plane/sdk.py +24 -0
- cribl_control_plane/teams.py +203 -0
- cribl_control_plane/workers_sdk.py +555 -0
- {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.19.dist-info}/METADATA +42 -8
- {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.19.dist-info}/RECORD +65 -8
- {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.19.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,479 @@
|
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
|
2
|
+
|
|
3
|
+
from .basesdk import BaseSDK
|
|
4
|
+
from cribl_control_plane import errors, models, utils
|
|
5
|
+
from cribl_control_plane._hooks import HookContext
|
|
6
|
+
from cribl_control_plane.types import OptionalNullable, UNSET
|
|
7
|
+
from cribl_control_plane.utils import get_security_from_env
|
|
8
|
+
from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
|
|
9
|
+
from typing import Any, List, Mapping, Optional, Union
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Lake(BaseSDK):
    r"""Actions related to Lake"""

    def create_cribl_lake_dataset_by_lake_id(
        self,
        *,
        lake_id: str,
        id: str,
        accelerated_fields: Optional[List[str]] = None,
        bucket_name: Optional[str] = None,
        cache_connection: Optional[
            Union[models.CacheConnection, models.CacheConnectionTypedDict]
        ] = None,
        deletion_started_at: Optional[float] = None,
        description: Optional[str] = None,
        format_: Optional[models.CriblLakeDatasetFormat] = None,
        http_da_used: Optional[bool] = None,
        retention_period_in_days: Optional[float] = None,
        search_config: Optional[
            Union[
                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
            ]
        ] = None,
        storage_location_id: Optional[str] = None,
        view_name: Optional[str] = None,
        retries: OptionalNullable[utils.RetryConfig] = UNSET,
        server_url: Optional[str] = None,
        timeout_ms: Optional[int] = None,
        http_headers: Optional[Mapping[str, str]] = None,
    ) -> models.CreateCriblLakeDatasetByLakeIDResponse:
        r"""Create a Dataset in the specified Lake

        Create a Dataset in the specified Lake

        :param lake_id: lake id that contains the Datasets
        :param id:
        :param accelerated_fields:
        :param bucket_name:
        :param cache_connection:
        :param deletion_started_at:
        :param description:
        :param format_:
        :param http_da_used:
        :param retention_period_in_days:
        :param search_config:
        :param storage_location_id:
        :param view_name:
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
        :param http_headers: Additional headers to set or replace on requests.
        """
        base_url = None
        url_variables = None
        # Per-call timeout override falls back to the SDK-wide configuration.
        if timeout_ms is None:
            timeout_ms = self.sdk_configuration.timeout_ms

        # An explicit server_url takes precedence over the configured base URL.
        if server_url is not None:
            base_url = server_url
        else:
            base_url = self._get_url(base_url, url_variables)

        # Assemble the operation request model; flat keyword arguments are
        # folded into the nested CriblLakeDataset body.
        request = models.CreateCriblLakeDatasetByLakeIDRequest(
            lake_id=lake_id,
            cribl_lake_dataset=models.CriblLakeDataset(
                accelerated_fields=accelerated_fields,
                bucket_name=bucket_name,
                cache_connection=utils.get_pydantic_model(
                    cache_connection, Optional[models.CacheConnection]
                ),
                deletion_started_at=deletion_started_at,
                description=description,
                format_=format_,
                http_da_used=http_da_used,
                id=id,
                retention_period_in_days=retention_period_in_days,
                search_config=utils.get_pydantic_model(
                    search_config, Optional[models.LakeDatasetSearchConfig]
                ),
                storage_location_id=storage_location_id,
                view_name=view_name,
            ),
        )

        req = self._build_request(
            method="POST",
            path="/products/lake/lakes/{lakeId}/datasets",
            base_url=base_url,
            url_variables=url_variables,
            request=request,
            request_body_required=True,
            request_has_path_params=True,
            request_has_query_params=True,
            user_agent_header="user-agent",
            accept_header_value="application/json",
            http_headers=http_headers,
            security=self.sdk_configuration.security,
            # Lazily serialized so hooks can still mutate `request` first.
            get_serialized_body=lambda: utils.serialize_request_body(
                request.cribl_lake_dataset,
                False,
                False,
                "json",
                models.CriblLakeDataset,
            ),
            timeout_ms=timeout_ms,
        )

        # UNSET means "not specified": inherit the SDK-level retry policy.
        if retries == UNSET:
            if self.sdk_configuration.retry_config is not UNSET:
                retries = self.sdk_configuration.retry_config

        retry_config = None
        if isinstance(retries, utils.RetryConfig):
            # Retry only on these (presumably transient) HTTP status codes.
            retry_config = (retries, ["429", "500", "502", "503", "504"])

        http_res = self.do_request(
            hook_ctx=HookContext(
                config=self.sdk_configuration,
                base_url=base_url or "",
                operation_id="createCriblLakeDatasetByLakeId",
                oauth2_scopes=[],
                security_source=get_security_from_env(
                    self.sdk_configuration.security, models.Security
                ),
            ),
            request=req,
            error_status_codes=["401", "4XX", "500", "5XX"],
            retry_config=retry_config,
        )

        # Dispatch on status code: 200 -> typed response; JSON 500 -> typed
        # error payload; anything else raises a generic APIError.
        response_data: Any = None
        if utils.match_response(http_res, "200", "application/json"):
            return unmarshal_json_response(
                models.CreateCriblLakeDatasetByLakeIDResponse, http_res
            )
        if utils.match_response(http_res, "500", "application/json"):
            response_data = unmarshal_json_response(errors.ErrorData, http_res)
            raise errors.Error(response_data, http_res)
        if utils.match_response(http_res, ["401", "4XX"], "*"):
            http_res_text = utils.stream_to_text(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)
        if utils.match_response(http_res, "5XX", "*"):
            http_res_text = utils.stream_to_text(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)

        raise errors.APIError("Unexpected response received", http_res)
|
|
158
|
+
|
|
159
|
+
    async def create_cribl_lake_dataset_by_lake_id_async(
        self,
        *,
        lake_id: str,
        id: str,
        accelerated_fields: Optional[List[str]] = None,
        bucket_name: Optional[str] = None,
        cache_connection: Optional[
            Union[models.CacheConnection, models.CacheConnectionTypedDict]
        ] = None,
        deletion_started_at: Optional[float] = None,
        description: Optional[str] = None,
        format_: Optional[models.CriblLakeDatasetFormat] = None,
        http_da_used: Optional[bool] = None,
        retention_period_in_days: Optional[float] = None,
        search_config: Optional[
            Union[
                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
            ]
        ] = None,
        storage_location_id: Optional[str] = None,
        view_name: Optional[str] = None,
        retries: OptionalNullable[utils.RetryConfig] = UNSET,
        server_url: Optional[str] = None,
        timeout_ms: Optional[int] = None,
        http_headers: Optional[Mapping[str, str]] = None,
    ) -> models.CreateCriblLakeDatasetByLakeIDResponse:
        r"""Create a Dataset in the specified Lake

        Create a Dataset in the specified Lake

        Async variant of :meth:`create_cribl_lake_dataset_by_lake_id`.

        :param lake_id: lake id that contains the Datasets
        :param id:
        :param accelerated_fields:
        :param bucket_name:
        :param cache_connection:
        :param deletion_started_at:
        :param description:
        :param format_:
        :param http_da_used:
        :param retention_period_in_days:
        :param search_config:
        :param storage_location_id:
        :param view_name:
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
        :param http_headers: Additional headers to set or replace on requests.
        """
        base_url = None
        url_variables = None
        # Per-call timeout override falls back to the SDK-wide configuration.
        if timeout_ms is None:
            timeout_ms = self.sdk_configuration.timeout_ms

        # An explicit server_url takes precedence over the configured base URL.
        if server_url is not None:
            base_url = server_url
        else:
            base_url = self._get_url(base_url, url_variables)

        # Assemble the operation request model; flat keyword arguments are
        # folded into the nested CriblLakeDataset body.
        request = models.CreateCriblLakeDatasetByLakeIDRequest(
            lake_id=lake_id,
            cribl_lake_dataset=models.CriblLakeDataset(
                accelerated_fields=accelerated_fields,
                bucket_name=bucket_name,
                cache_connection=utils.get_pydantic_model(
                    cache_connection, Optional[models.CacheConnection]
                ),
                deletion_started_at=deletion_started_at,
                description=description,
                format_=format_,
                http_da_used=http_da_used,
                id=id,
                retention_period_in_days=retention_period_in_days,
                search_config=utils.get_pydantic_model(
                    search_config, Optional[models.LakeDatasetSearchConfig]
                ),
                storage_location_id=storage_location_id,
                view_name=view_name,
            ),
        )

        req = self._build_request_async(
            method="POST",
            path="/products/lake/lakes/{lakeId}/datasets",
            base_url=base_url,
            url_variables=url_variables,
            request=request,
            request_body_required=True,
            request_has_path_params=True,
            request_has_query_params=True,
            user_agent_header="user-agent",
            accept_header_value="application/json",
            http_headers=http_headers,
            security=self.sdk_configuration.security,
            # Lazily serialized so hooks can still mutate `request` first.
            get_serialized_body=lambda: utils.serialize_request_body(
                request.cribl_lake_dataset,
                False,
                False,
                "json",
                models.CriblLakeDataset,
            ),
            timeout_ms=timeout_ms,
        )

        # UNSET means "not specified": inherit the SDK-level retry policy.
        if retries == UNSET:
            if self.sdk_configuration.retry_config is not UNSET:
                retries = self.sdk_configuration.retry_config

        retry_config = None
        if isinstance(retries, utils.RetryConfig):
            # Retry only on these (presumably transient) HTTP status codes.
            retry_config = (retries, ["429", "500", "502", "503", "504"])

        http_res = await self.do_request_async(
            hook_ctx=HookContext(
                config=self.sdk_configuration,
                base_url=base_url or "",
                operation_id="createCriblLakeDatasetByLakeId",
                oauth2_scopes=[],
                security_source=get_security_from_env(
                    self.sdk_configuration.security, models.Security
                ),
            ),
            request=req,
            error_status_codes=["401", "4XX", "500", "5XX"],
            retry_config=retry_config,
        )

        # Dispatch on status code: 200 -> typed response; JSON 500 -> typed
        # error payload; anything else raises a generic APIError.
        response_data: Any = None
        if utils.match_response(http_res, "200", "application/json"):
            return unmarshal_json_response(
                models.CreateCriblLakeDatasetByLakeIDResponse, http_res
            )
        if utils.match_response(http_res, "500", "application/json"):
            response_data = unmarshal_json_response(errors.ErrorData, http_res)
            raise errors.Error(response_data, http_res)
        if utils.match_response(http_res, ["401", "4XX"], "*"):
            http_res_text = await utils.stream_to_text_async(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)
        if utils.match_response(http_res, "5XX", "*"):
            http_res_text = await utils.stream_to_text_async(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)

        raise errors.APIError("Unexpected response received", http_res)
|
|
302
|
+
|
|
303
|
+
    def get_cribl_lake_dataset_by_lake_id(
        self,
        *,
        lake_id: str,
        retries: OptionalNullable[utils.RetryConfig] = UNSET,
        server_url: Optional[str] = None,
        timeout_ms: Optional[int] = None,
        http_headers: Optional[Mapping[str, str]] = None,
    ) -> models.GetCriblLakeDatasetByLakeIDResponse:
        r"""Get the list of Datasets contained in the specified Lake

        Get the list of Datasets contained in the specified Lake

        :param lake_id: lake id that contains the Datasets
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
        :param http_headers: Additional headers to set or replace on requests.
        """
        base_url = None
        url_variables = None
        # Per-call timeout override falls back to the SDK-wide configuration.
        if timeout_ms is None:
            timeout_ms = self.sdk_configuration.timeout_ms

        # An explicit server_url takes precedence over the configured base URL.
        if server_url is not None:
            base_url = server_url
        else:
            base_url = self._get_url(base_url, url_variables)

        request = models.GetCriblLakeDatasetByLakeIDRequest(
            lake_id=lake_id,
        )

        # GET request: path parameter only, no body.
        req = self._build_request(
            method="GET",
            path="/products/lake/lakes/{lakeId}/datasets",
            base_url=base_url,
            url_variables=url_variables,
            request=request,
            request_body_required=False,
            request_has_path_params=True,
            request_has_query_params=True,
            user_agent_header="user-agent",
            accept_header_value="application/json",
            http_headers=http_headers,
            security=self.sdk_configuration.security,
            timeout_ms=timeout_ms,
        )

        # UNSET means "not specified": inherit the SDK-level retry policy.
        if retries == UNSET:
            if self.sdk_configuration.retry_config is not UNSET:
                retries = self.sdk_configuration.retry_config

        retry_config = None
        if isinstance(retries, utils.RetryConfig):
            # Retry only on these (presumably transient) HTTP status codes.
            retry_config = (retries, ["429", "500", "502", "503", "504"])

        http_res = self.do_request(
            hook_ctx=HookContext(
                config=self.sdk_configuration,
                base_url=base_url or "",
                operation_id="getCriblLakeDatasetByLakeId",
                oauth2_scopes=[],
                security_source=get_security_from_env(
                    self.sdk_configuration.security, models.Security
                ),
            ),
            request=req,
            error_status_codes=["401", "4XX", "500", "5XX"],
            retry_config=retry_config,
        )

        # Dispatch on status code: 200 -> typed response; JSON 500 -> typed
        # error payload; anything else raises a generic APIError.
        response_data: Any = None
        if utils.match_response(http_res, "200", "application/json"):
            return unmarshal_json_response(
                models.GetCriblLakeDatasetByLakeIDResponse, http_res
            )
        if utils.match_response(http_res, "500", "application/json"):
            response_data = unmarshal_json_response(errors.ErrorData, http_res)
            raise errors.Error(response_data, http_res)
        if utils.match_response(http_res, ["401", "4XX"], "*"):
            http_res_text = utils.stream_to_text(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)
        if utils.match_response(http_res, "5XX", "*"):
            http_res_text = utils.stream_to_text(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)

        raise errors.APIError("Unexpected response received", http_res)
|
|
391
|
+
|
|
392
|
+
    async def get_cribl_lake_dataset_by_lake_id_async(
        self,
        *,
        lake_id: str,
        retries: OptionalNullable[utils.RetryConfig] = UNSET,
        server_url: Optional[str] = None,
        timeout_ms: Optional[int] = None,
        http_headers: Optional[Mapping[str, str]] = None,
    ) -> models.GetCriblLakeDatasetByLakeIDResponse:
        r"""Get the list of Datasets contained in the specified Lake

        Get the list of Datasets contained in the specified Lake

        Async variant of :meth:`get_cribl_lake_dataset_by_lake_id`.

        :param lake_id: lake id that contains the Datasets
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
        :param http_headers: Additional headers to set or replace on requests.
        """
        base_url = None
        url_variables = None
        # Per-call timeout override falls back to the SDK-wide configuration.
        if timeout_ms is None:
            timeout_ms = self.sdk_configuration.timeout_ms

        # An explicit server_url takes precedence over the configured base URL.
        if server_url is not None:
            base_url = server_url
        else:
            base_url = self._get_url(base_url, url_variables)

        request = models.GetCriblLakeDatasetByLakeIDRequest(
            lake_id=lake_id,
        )

        # GET request: path parameter only, no body.
        req = self._build_request_async(
            method="GET",
            path="/products/lake/lakes/{lakeId}/datasets",
            base_url=base_url,
            url_variables=url_variables,
            request=request,
            request_body_required=False,
            request_has_path_params=True,
            request_has_query_params=True,
            user_agent_header="user-agent",
            accept_header_value="application/json",
            http_headers=http_headers,
            security=self.sdk_configuration.security,
            timeout_ms=timeout_ms,
        )

        # UNSET means "not specified": inherit the SDK-level retry policy.
        if retries == UNSET:
            if self.sdk_configuration.retry_config is not UNSET:
                retries = self.sdk_configuration.retry_config

        retry_config = None
        if isinstance(retries, utils.RetryConfig):
            # Retry only on these (presumably transient) HTTP status codes.
            retry_config = (retries, ["429", "500", "502", "503", "504"])

        http_res = await self.do_request_async(
            hook_ctx=HookContext(
                config=self.sdk_configuration,
                base_url=base_url or "",
                operation_id="getCriblLakeDatasetByLakeId",
                oauth2_scopes=[],
                security_source=get_security_from_env(
                    self.sdk_configuration.security, models.Security
                ),
            ),
            request=req,
            error_status_codes=["401", "4XX", "500", "5XX"],
            retry_config=retry_config,
        )

        # Dispatch on status code: 200 -> typed response; JSON 500 -> typed
        # error payload; anything else raises a generic APIError.
        response_data: Any = None
        if utils.match_response(http_res, "200", "application/json"):
            return unmarshal_json_response(
                models.GetCriblLakeDatasetByLakeIDResponse, http_res
            )
        if utils.match_response(http_res, "500", "application/json"):
            response_data = unmarshal_json_response(errors.ErrorData, http_res)
            raise errors.Error(response_data, http_res)
        if utils.match_response(http_res, ["401", "4XX"], "*"):
            http_res_text = await utils.stream_to_text_async(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)
        if utils.match_response(http_res, "5XX", "*"):
            http_res_text = await utils.stream_to_text_async(http_res)
            raise errors.APIError("API error occurred", http_res, http_res_text)

        raise errors.APIError("Unexpected response received", http_res)
|