cribl-control-plane 0.0.18-py3-none-any.whl → 0.0.20-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (70)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/distributed.py +187 -0
  3. cribl_control_plane/errors/healthstatus_error.py +1 -1
  4. cribl_control_plane/groups_sdk.py +1291 -0
  5. cribl_control_plane/lake.py +1141 -0
  6. cribl_control_plane/models/__init__.py +672 -3
  7. cribl_control_plane/models/appmode.py +13 -0
  8. cribl_control_plane/models/cacheconnection.py +44 -0
  9. cribl_control_plane/models/cacheconnectionbackfillstatus.py +12 -0
  10. cribl_control_plane/models/cloudprovider.py +9 -0
  11. cribl_control_plane/models/commit.py +30 -0
  12. cribl_control_plane/models/configgroup.py +116 -0
  13. cribl_control_plane/models/configgroupcloud.py +48 -0
  14. cribl_control_plane/models/configgrouplookups.py +34 -0
  15. cribl_control_plane/models/createcribllakedatasetbylakeidop.py +48 -0
  16. cribl_control_plane/models/createpacksop.py +24 -0
  17. cribl_control_plane/models/createproductsgroupsbyproductop.py +54 -0
  18. cribl_control_plane/models/cribllakedataset.py +74 -0
  19. cribl_control_plane/models/datasetmetadata.py +39 -0
  20. cribl_control_plane/models/datasetmetadataruninfo.py +28 -0
  21. cribl_control_plane/models/deletecribllakedatasetbylakeidandidop.py +47 -0
  22. cribl_control_plane/models/deletepacksbyidop.py +37 -0
  23. cribl_control_plane/models/deployrequest.py +18 -0
  24. cribl_control_plane/models/deployrequestlookups.py +28 -0
  25. cribl_control_plane/models/distributedsummary.py +63 -0
  26. cribl_control_plane/models/getcribllakedatasetbylakeidandidop.py +47 -0
  27. cribl_control_plane/models/getcribllakedatasetbylakeidop.py +40 -0
  28. cribl_control_plane/models/getgroupsaclbyidop.py +63 -0
  29. cribl_control_plane/models/getgroupsbyidop.py +49 -0
  30. cribl_control_plane/models/getgroupsconfigversionbyidop.py +36 -0
  31. cribl_control_plane/models/getpacksop.py +40 -0
  32. cribl_control_plane/models/getproductsgroupsaclteamsbyproductandidop.py +78 -0
  33. cribl_control_plane/models/getproductsgroupsbyproductop.py +58 -0
  34. cribl_control_plane/models/getsummaryop.py +46 -0
  35. cribl_control_plane/models/getsummaryworkersop.py +39 -0
  36. cribl_control_plane/models/getworkersop.py +82 -0
  37. cribl_control_plane/models/hbcriblinfo.py +80 -0
  38. cribl_control_plane/models/hbleaderinfo.py +23 -0
  39. cribl_control_plane/models/healthstatus.py +3 -3
  40. cribl_control_plane/models/heartbeatmetadata.py +122 -0
  41. cribl_control_plane/models/lakedatasetsearchconfig.py +18 -0
  42. cribl_control_plane/models/lakehouseconnectiontype.py +9 -0
  43. cribl_control_plane/models/lookupversions.py +13 -0
  44. cribl_control_plane/models/masterworkerentry.py +84 -0
  45. cribl_control_plane/models/nodeactiveupgradestatus.py +10 -0
  46. cribl_control_plane/models/nodefailedupgradestatus.py +9 -0
  47. cribl_control_plane/models/nodeprovidedinfo.py +184 -0
  48. cribl_control_plane/models/nodeskippedupgradestatus.py +11 -0
  49. cribl_control_plane/models/nodeupgradestate.py +11 -0
  50. cribl_control_plane/models/nodeupgradestatus.py +30 -0
  51. cribl_control_plane/models/packinfo.py +73 -0
  52. cribl_control_plane/models/packinstallinfo.py +76 -0
  53. cribl_control_plane/models/packrequestbody.py +75 -0
  54. cribl_control_plane/models/rbacresource.py +14 -0
  55. cribl_control_plane/models/resourcepolicy.py +24 -0
  56. cribl_control_plane/models/restartresponse.py +26 -0
  57. cribl_control_plane/models/teamaccesscontrollist.py +18 -0
  58. cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +57 -0
  59. cribl_control_plane/models/updategroupsdeploybyidop.py +46 -0
  60. cribl_control_plane/models/updatepacksbyidop.py +65 -0
  61. cribl_control_plane/models/updatepacksop.py +37 -0
  62. cribl_control_plane/models/updateworkersrestartop.py +24 -0
  63. cribl_control_plane/models/useraccesscontrollist.py +18 -0
  64. cribl_control_plane/packs.py +989 -0
  65. cribl_control_plane/sdk.py +24 -0
  66. cribl_control_plane/teams.py +203 -0
  67. cribl_control_plane/workers_sdk.py +555 -0
  68. {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.20.dist-info}/METADATA +47 -8
  69. {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.20.dist-info}/RECORD +70 -8
  70. {cribl_control_plane-0.0.18.dist-info → cribl_control_plane-0.0.20.dist-info}/WHEEL +0 -0
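Most of the changes add new generated sub-SDK modules (distributed.py, groups_sdk.py, lake.py, packs.py, teams.py, workers_sdk.py) plus their request/response models, with sdk.py updated to expose them; only the new cribl_control_plane/lake.py is reproduced in full below. As a rough orientation, here is a minimal usage sketch. The root client class name, its constructor arguments, and the sub-SDK attribute names are assumptions inferred from the module names and are not confirmed by this diff:

# Hypothetical sketch only: the class name, constructor arguments, and attribute
# names are assumptions; the Lake method name does appear in lake.py below.
from cribl_control_plane import CriblControlPlane, models

client = CriblControlPlane(
    server_url="https://example.cribl.cloud/api/v1",  # placeholder URL
    security=models.Security(bearer_auth="<token>"),  # Security fields assumed
)

# New namespaces presumably wired in by sdk.py (+24 lines):
datasets = client.lake.get_cribl_lake_dataset_by_lake_id(lake_id="default")
workers = client.workers  # Workers sub-SDK (workers_sdk.py)
packs = client.packs      # Packs sub-SDK (packs.py)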
cribl_control_plane/lake.py (new file)
@@ -0,0 +1,1141 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from .basesdk import BaseSDK
+from cribl_control_plane import errors, models, utils
+from cribl_control_plane._hooks import HookContext
+from cribl_control_plane.types import OptionalNullable, UNSET
+from cribl_control_plane.utils import get_security_from_env
+from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
+from typing import Any, List, Mapping, Optional, Union
+
+
+class Lake(BaseSDK):
+    r"""Actions related to Lake"""
+
+    def create_cribl_lake_dataset_by_lake_id(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        accelerated_fields: Optional[List[str]] = None,
+        bucket_name: Optional[str] = None,
+        cache_connection: Optional[
+            Union[models.CacheConnection, models.CacheConnectionTypedDict]
+        ] = None,
+        deletion_started_at: Optional[float] = None,
+        description: Optional[str] = None,
+        format_: Optional[models.CriblLakeDatasetFormat] = None,
+        http_da_used: Optional[bool] = None,
+        retention_period_in_days: Optional[float] = None,
+        search_config: Optional[
+            Union[
+                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
+            ]
+        ] = None,
+        storage_location_id: Optional[str] = None,
+        view_name: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.CreateCriblLakeDatasetByLakeIDResponse:
+        r"""Create a Dataset in the specified Lake
+
+        Create a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id:
+        :param accelerated_fields:
+        :param bucket_name:
+        :param cache_connection:
+        :param deletion_started_at:
+        :param description:
+        :param format_:
+        :param http_da_used:
+        :param retention_period_in_days:
+        :param search_config:
+        :param storage_location_id:
+        :param view_name:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.CreateCriblLakeDatasetByLakeIDRequest(
+            lake_id=lake_id,
+            cribl_lake_dataset=models.CriblLakeDataset(
+                accelerated_fields=accelerated_fields,
+                bucket_name=bucket_name,
+                cache_connection=utils.get_pydantic_model(
+                    cache_connection, Optional[models.CacheConnection]
+                ),
+                deletion_started_at=deletion_started_at,
+                description=description,
+                format_=format_,
+                http_da_used=http_da_used,
+                id=id,
+                retention_period_in_days=retention_period_in_days,
+                search_config=utils.get_pydantic_model(
+                    search_config, Optional[models.LakeDatasetSearchConfig]
+                ),
+                storage_location_id=storage_location_id,
+                view_name=view_name,
+            ),
+        )
+
+        req = self._build_request(
+            method="POST",
+            path="/products/lake/lakes/{lakeId}/datasets",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.cribl_lake_dataset,
+                False,
+                False,
+                "json",
+                models.CriblLakeDataset,
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="createCriblLakeDatasetByLakeId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.CreateCriblLakeDatasetByLakeIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def create_cribl_lake_dataset_by_lake_id_async(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        accelerated_fields: Optional[List[str]] = None,
+        bucket_name: Optional[str] = None,
+        cache_connection: Optional[
+            Union[models.CacheConnection, models.CacheConnectionTypedDict]
+        ] = None,
+        deletion_started_at: Optional[float] = None,
+        description: Optional[str] = None,
+        format_: Optional[models.CriblLakeDatasetFormat] = None,
+        http_da_used: Optional[bool] = None,
+        retention_period_in_days: Optional[float] = None,
+        search_config: Optional[
+            Union[
+                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
+            ]
+        ] = None,
+        storage_location_id: Optional[str] = None,
+        view_name: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.CreateCriblLakeDatasetByLakeIDResponse:
+        r"""Create a Dataset in the specified Lake
+
+        Create a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id:
+        :param accelerated_fields:
+        :param bucket_name:
+        :param cache_connection:
+        :param deletion_started_at:
+        :param description:
+        :param format_:
+        :param http_da_used:
+        :param retention_period_in_days:
+        :param search_config:
+        :param storage_location_id:
+        :param view_name:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.CreateCriblLakeDatasetByLakeIDRequest(
+            lake_id=lake_id,
+            cribl_lake_dataset=models.CriblLakeDataset(
+                accelerated_fields=accelerated_fields,
+                bucket_name=bucket_name,
+                cache_connection=utils.get_pydantic_model(
+                    cache_connection, Optional[models.CacheConnection]
+                ),
+                deletion_started_at=deletion_started_at,
+                description=description,
+                format_=format_,
+                http_da_used=http_da_used,
+                id=id,
+                retention_period_in_days=retention_period_in_days,
+                search_config=utils.get_pydantic_model(
+                    search_config, Optional[models.LakeDatasetSearchConfig]
+                ),
+                storage_location_id=storage_location_id,
+                view_name=view_name,
+            ),
+        )
+
+        req = self._build_request_async(
+            method="POST",
+            path="/products/lake/lakes/{lakeId}/datasets",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.cribl_lake_dataset,
+                False,
+                False,
+                "json",
+                models.CriblLakeDataset,
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="createCriblLakeDatasetByLakeId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.CreateCriblLakeDatasetByLakeIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    def get_cribl_lake_dataset_by_lake_id(
+        self,
+        *,
+        lake_id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetCriblLakeDatasetByLakeIDResponse:
+        r"""Get the list of Dataset contained in the specified Lake
+
+        Get the list of Dataset contained in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetCriblLakeDatasetByLakeIDRequest(
+            lake_id=lake_id,
+        )
+
+        req = self._build_request(
+            method="GET",
+            path="/products/lake/lakes/{lakeId}/datasets",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getCriblLakeDatasetByLakeId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.GetCriblLakeDatasetByLakeIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def get_cribl_lake_dataset_by_lake_id_async(
+        self,
+        *,
+        lake_id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetCriblLakeDatasetByLakeIDResponse:
+        r"""Get the list of Dataset contained in the specified Lake
+
+        Get the list of Dataset contained in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetCriblLakeDatasetByLakeIDRequest(
+            lake_id=lake_id,
+        )
+
+        req = self._build_request_async(
+            method="GET",
+            path="/products/lake/lakes/{lakeId}/datasets",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getCriblLakeDatasetByLakeId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.GetCriblLakeDatasetByLakeIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    def delete_cribl_lake_dataset_by_lake_id_and_id(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.DeleteCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Delete a Dataset in the specified Lake
+
+        Delete a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id: dataset id to delete
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.DeleteCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id=id,
+        )
+
+        req = self._build_request(
+            method="DELETE",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="deleteCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.DeleteCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def delete_cribl_lake_dataset_by_lake_id_and_id_async(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.DeleteCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Delete a Dataset in the specified Lake
+
+        Delete a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id: dataset id to delete
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.DeleteCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id=id,
+        )
+
+        req = self._build_request_async(
+            method="DELETE",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="deleteCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.DeleteCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    def get_cribl_lake_dataset_by_lake_id_and_id(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Get a Dataset in the specified Lake
+
+        Get a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id: dataset id to get
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id=id,
+        )
+
+        req = self._build_request(
+            method="GET",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.GetCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def get_cribl_lake_dataset_by_lake_id_and_id_async(
+        self,
+        *,
+        lake_id: str,
+        id: str,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Get a Dataset in the specified Lake
+
+        Get a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id: dataset id to get
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id=id,
+        )
+
+        req = self._build_request_async(
+            method="GET",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.GetCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    def update_cribl_lake_dataset_by_lake_id_and_id(
+        self,
+        *,
+        lake_id: str,
+        id_param: str,
+        id: str,
+        accelerated_fields: Optional[List[str]] = None,
+        bucket_name: Optional[str] = None,
+        cache_connection: Optional[
+            Union[models.CacheConnection, models.CacheConnectionTypedDict]
+        ] = None,
+        deletion_started_at: Optional[float] = None,
+        description: Optional[str] = None,
+        format_: Optional[models.CriblLakeDatasetFormat] = None,
+        http_da_used: Optional[bool] = None,
+        retention_period_in_days: Optional[float] = None,
+        search_config: Optional[
+            Union[
+                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
+            ]
+        ] = None,
+        storage_location_id: Optional[str] = None,
+        view_name: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UpdateCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Update a Dataset in the specified Lake
+
+        Update a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id_param: dataset id to update
+        :param id:
+        :param accelerated_fields:
+        :param bucket_name:
+        :param cache_connection:
+        :param deletion_started_at:
+        :param description:
+        :param format_:
+        :param http_da_used:
+        :param retention_period_in_days:
+        :param search_config:
+        :param storage_location_id:
+        :param view_name:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.UpdateCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id_param=id_param,
+            cribl_lake_dataset=models.CriblLakeDataset(
+                accelerated_fields=accelerated_fields,
+                bucket_name=bucket_name,
+                cache_connection=utils.get_pydantic_model(
+                    cache_connection, Optional[models.CacheConnection]
+                ),
+                deletion_started_at=deletion_started_at,
+                description=description,
+                format_=format_,
+                http_da_used=http_da_used,
+                id=id,
+                retention_period_in_days=retention_period_in_days,
+                search_config=utils.get_pydantic_model(
+                    search_config, Optional[models.LakeDatasetSearchConfig]
+                ),
+                storage_location_id=storage_location_id,
+                view_name=view_name,
+            ),
+        )
+
+        req = self._build_request(
+            method="PATCH",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.cribl_lake_dataset,
+                False,
+                False,
+                "json",
+                models.CriblLakeDataset,
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updateCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.UpdateCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def update_cribl_lake_dataset_by_lake_id_and_id_async(
+        self,
+        *,
+        lake_id: str,
+        id_param: str,
+        id: str,
+        accelerated_fields: Optional[List[str]] = None,
+        bucket_name: Optional[str] = None,
+        cache_connection: Optional[
+            Union[models.CacheConnection, models.CacheConnectionTypedDict]
+        ] = None,
+        deletion_started_at: Optional[float] = None,
+        description: Optional[str] = None,
+        format_: Optional[models.CriblLakeDatasetFormat] = None,
+        http_da_used: Optional[bool] = None,
+        retention_period_in_days: Optional[float] = None,
+        search_config: Optional[
+            Union[
+                models.LakeDatasetSearchConfig, models.LakeDatasetSearchConfigTypedDict
+            ]
+        ] = None,
+        storage_location_id: Optional[str] = None,
+        view_name: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UpdateCriblLakeDatasetByLakeIDAndIDResponse:
+        r"""Update a Dataset in the specified Lake
+
+        Update a Dataset in the specified Lake
+
+        :param lake_id: lake id that contains the Datasets
+        :param id_param: dataset id to update
+        :param id:
+        :param accelerated_fields:
+        :param bucket_name:
+        :param cache_connection:
+        :param deletion_started_at:
+        :param description:
+        :param format_:
+        :param http_da_used:
+        :param retention_period_in_days:
+        :param search_config:
+        :param storage_location_id:
+        :param view_name:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.UpdateCriblLakeDatasetByLakeIDAndIDRequest(
+            lake_id=lake_id,
+            id_param=id_param,
+            cribl_lake_dataset=models.CriblLakeDataset(
+                accelerated_fields=accelerated_fields,
+                bucket_name=bucket_name,
+                cache_connection=utils.get_pydantic_model(
+                    cache_connection, Optional[models.CacheConnection]
+                ),
+                deletion_started_at=deletion_started_at,
+                description=description,
+                format_=format_,
+                http_da_used=http_da_used,
+                id=id,
+                retention_period_in_days=retention_period_in_days,
+                search_config=utils.get_pydantic_model(
+                    search_config, Optional[models.LakeDatasetSearchConfig]
+                ),
+                storage_location_id=storage_location_id,
+                view_name=view_name,
+            ),
+        )
+
+        req = self._build_request_async(
+            method="PATCH",
+            path="/products/lake/lakes/{lakeId}/datasets/{id}",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=True,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.cribl_lake_dataset,
+                False,
+                False,
+                "json",
+                models.CriblLakeDataset,
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updateCriblLakeDatasetByLakeIdAndId",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.UpdateCriblLakeDatasetByLakeIDAndIDResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
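Taken together, the Lake class above exposes create, list, get, update, and delete operations for Lake Datasets, each with a synchronous and an _async variant and per-call retries, server_url, timeout_ms, and http_headers overrides. A short sketch of how these calls might look, reusing the hypothetical client from the earlier sketch; the method names and parameters come from lake.py, while the client.lake attribute and all argument values are assumptions:

# "client" is the hypothetical CriblControlPlane instance from the earlier sketch.
created = client.lake.create_cribl_lake_dataset_by_lake_id(
    lake_id="default",
    id="web_logs",
    description="Example dataset",   # optional CriblLakeDataset field
    retention_period_in_days=30,
)

one = client.lake.get_cribl_lake_dataset_by_lake_id_and_id(lake_id="default", id="web_logs")

updated = client.lake.update_cribl_lake_dataset_by_lake_id_and_id(
    lake_id="default",
    id_param="web_logs",   # path parameter: which dataset to update
    id="web_logs",         # id field carried in the request body
    retention_period_in_days=60,
)

client.lake.delete_cribl_lake_dataset_by_lake_id_and_id(lake_id="default", id="web_logs")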