cribl-control-plane 0.0.21a1__py3-none-any.whl → 0.0.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry flagged this version of cribl-control-plane for review.

@@ -1857,6 +1857,12 @@ if TYPE_CHECKING:
         DeleteCriblLakeDatasetByLakeIDAndIDResponse,
         DeleteCriblLakeDatasetByLakeIDAndIDResponseTypedDict,
     )
+    from .deletegroupsbyidop import (
+        DeleteGroupsByIDRequest,
+        DeleteGroupsByIDRequestTypedDict,
+        DeleteGroupsByIDResponse,
+        DeleteGroupsByIDResponseTypedDict,
+    )
     from .deleteinputbyidop import (
         DeleteInputByIDRequest,
         DeleteInputByIDRequestTypedDict,
@@ -4711,6 +4717,12 @@ if TYPE_CHECKING:
         UpdateCriblLakeDatasetByLakeIDAndIDResponse,
         UpdateCriblLakeDatasetByLakeIDAndIDResponseTypedDict,
     )
+    from .updategroupsbyidop import (
+        UpdateGroupsByIDRequest,
+        UpdateGroupsByIDRequestTypedDict,
+        UpdateGroupsByIDResponse,
+        UpdateGroupsByIDResponseTypedDict,
+    )
     from .updategroupsdeploybyidop import (
         UpdateGroupsDeployByIDRequest,
         UpdateGroupsDeployByIDRequestTypedDict,
@@ -5877,6 +5889,10 @@ __all__ = [
    "DeleteCriblLakeDatasetByLakeIDAndIDRequestTypedDict",
    "DeleteCriblLakeDatasetByLakeIDAndIDResponse",
    "DeleteCriblLakeDatasetByLakeIDAndIDResponseTypedDict",
+   "DeleteGroupsByIDRequest",
+   "DeleteGroupsByIDRequestTypedDict",
+   "DeleteGroupsByIDResponse",
+   "DeleteGroupsByIDResponseTypedDict",
    "DeleteInputByIDRequest",
    "DeleteInputByIDRequestTypedDict",
    "DeleteInputByIDResponse",
@@ -9141,6 +9157,10 @@ __all__ = [
    "UpdateCriblLakeDatasetByLakeIDAndIDRequestTypedDict",
    "UpdateCriblLakeDatasetByLakeIDAndIDResponse",
    "UpdateCriblLakeDatasetByLakeIDAndIDResponseTypedDict",
+   "UpdateGroupsByIDRequest",
+   "UpdateGroupsByIDRequestTypedDict",
+   "UpdateGroupsByIDResponse",
+   "UpdateGroupsByIDResponseTypedDict",
    "UpdateGroupsDeployByIDRequest",
    "UpdateGroupsDeployByIDRequestTypedDict",
    "UpdateGroupsDeployByIDResponse",
@@ -11012,6 +11032,10 @@ _dynamic_imports: dict[str, str] = {
    "DeleteCriblLakeDatasetByLakeIDAndIDRequestTypedDict": ".deletecribllakedatasetbylakeidandidop",
    "DeleteCriblLakeDatasetByLakeIDAndIDResponse": ".deletecribllakedatasetbylakeidandidop",
    "DeleteCriblLakeDatasetByLakeIDAndIDResponseTypedDict": ".deletecribllakedatasetbylakeidandidop",
+   "DeleteGroupsByIDRequest": ".deletegroupsbyidop",
+   "DeleteGroupsByIDRequestTypedDict": ".deletegroupsbyidop",
+   "DeleteGroupsByIDResponse": ".deletegroupsbyidop",
+   "DeleteGroupsByIDResponseTypedDict": ".deletegroupsbyidop",
    "DeleteInputByIDRequest": ".deleteinputbyidop",
    "DeleteInputByIDRequestTypedDict": ".deleteinputbyidop",
    "DeleteInputByIDResponse": ".deleteinputbyidop",
@@ -13560,6 +13584,10 @@ _dynamic_imports: dict[str, str] = {
    "UpdateCriblLakeDatasetByLakeIDAndIDRequestTypedDict": ".updatecribllakedatasetbylakeidandidop",
    "UpdateCriblLakeDatasetByLakeIDAndIDResponse": ".updatecribllakedatasetbylakeidandidop",
    "UpdateCriblLakeDatasetByLakeIDAndIDResponseTypedDict": ".updatecribllakedatasetbylakeidandidop",
+   "UpdateGroupsByIDRequest": ".updategroupsbyidop",
+   "UpdateGroupsByIDRequestTypedDict": ".updategroupsbyidop",
+   "UpdateGroupsByIDResponse": ".updategroupsbyidop",
+   "UpdateGroupsByIDResponseTypedDict": ".updategroupsbyidop",
    "UpdateGroupsDeployByIDRequest": ".updategroupsdeploybyidop",
    "UpdateGroupsDeployByIDRequestTypedDict": ".updategroupsdeploybyidop",
    "UpdateGroupsDeployByIDResponse": ".updategroupsdeploybyidop",
@@ -0,0 +1,37 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .configgroup import ConfigGroup, ConfigGroupTypedDict
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import FieldMetadata, PathParamMetadata
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class DeleteGroupsByIDRequestTypedDict(TypedDict):
+    id: str
+    r"""Group ID"""
+
+
+class DeleteGroupsByIDRequest(BaseModel):
+    id: Annotated[
+        str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
+    ]
+    r"""Group ID"""
+
+
+class DeleteGroupsByIDResponseTypedDict(TypedDict):
+    r"""a list of ConfigGroup objects"""
+
+    count: NotRequired[int]
+    r"""number of items present in the items array"""
+    items: NotRequired[List[ConfigGroupTypedDict]]
+
+
+class DeleteGroupsByIDResponse(BaseModel):
+    r"""a list of ConfigGroup objects"""
+
+    count: Optional[int] = None
+    r"""number of items present in the items array"""
+
+    items: Optional[List[ConfigGroup]] = None
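These new request/response models back the `delete_groups_by_id` operation documented in the README changes below. A hedged usage sketch, assuming the client exposes the operation as `groups.delete_groups_by_id` and reusing the authentication setup from the README examples; the group ID is illustrative:

```python
import os

from cribl_control_plane import CriblControlPlane, models

with CriblControlPlane(
    server_url="https://api.example.com",
    security=models.Security(
        bearer_auth=os.getenv("CRIBLCONTROLPLANE_BEARER_AUTH", ""),
    ),
) as ccp_client:
    # Delete a Fleet or Worker Group by ID; per the classes above, the
    # response is modeled as a count/items envelope of ConfigGroup objects.
    res = ccp_client.groups.delete_groups_by_id(id="my-worker-group")
    print(res)
```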
@@ -0,0 +1,48 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .configgroup import ConfigGroup, ConfigGroupTypedDict
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import FieldMetadata, PathParamMetadata, RequestMetadata
+import pydantic
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class UpdateGroupsByIDRequestTypedDict(TypedDict):
+    id_param: str
+    r"""Group ID"""
+    config_group: ConfigGroupTypedDict
+    r"""ConfigGroup object"""
+
+
+class UpdateGroupsByIDRequest(BaseModel):
+    id_param: Annotated[
+        str,
+        pydantic.Field(alias="id"),
+        FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
+    ]
+    r"""Group ID"""
+
+    config_group: Annotated[
+        ConfigGroup,
+        FieldMetadata(request=RequestMetadata(media_type="application/json")),
+    ]
+    r"""ConfigGroup object"""
+
+
+class UpdateGroupsByIDResponseTypedDict(TypedDict):
+    r"""a list of ConfigGroup objects"""
+
+    count: NotRequired[int]
+    r"""number of items present in the items array"""
+    items: NotRequired[List[ConfigGroupTypedDict]]
+
+
+class UpdateGroupsByIDResponse(BaseModel):
+    r"""a list of ConfigGroup objects"""
+
+    count: Optional[int] = None
+    r"""number of items present in the items array"""
+
+    items: Optional[List[ConfigGroup]] = None
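Note that the path parameter is named `id_param` in Python (aliased to `id` on the wire) so it cannot clash with fields of the JSON body, and the `ConfigGroup` payload is serialized as `application/json`. A sketch of the corresponding call, assuming the flattened method keywords mirror the model field names; the `config_group` payload is a hypothetical minimal placeholder, not a complete `ConfigGroup`:

```python
import os

from cribl_control_plane import CriblControlPlane, models

with CriblControlPlane(
    server_url="https://api.example.com",
    security=models.Security(
        bearer_auth=os.getenv("CRIBLCONTROLPLANE_BEARER_AUTH", ""),
    ),
) as ccp_client:
    # `id_param` targets the group; `config_group` carries the JSON body.
    res = ccp_client.groups.update_groups_by_id(
        id_param="my-worker-group",
        config_group={"id": "my-worker-group"},  # hypothetical minimal payload
    )
    print(res)
```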
@@ -3,31 +3,16 @@
 from __future__ import annotations
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import FieldMetadata, QueryParamMetadata
-import io
-from typing import Any, Dict, IO, List, Optional, Union
+from typing import Any, Dict, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


 class UpdatePacksRequestTypedDict(TypedDict):
-    size: int
-    r"""Size of the pack file in bytes"""
-    request_body: Union[bytes, IO[bytes], io.BufferedReader]
-    r"""file data"""
     filename: NotRequired[str]
     r"""the file to upload"""


 class UpdatePacksRequest(BaseModel):
-    size: Annotated[
-        int, FieldMetadata(query=QueryParamMetadata(style="form", explode=True))
-    ]
-    r"""Size of the pack file in bytes"""
-
-    request_body: Annotated[
-        Union[bytes, IO[bytes], io.BufferedReader], FieldMetadata(request=True)
-    ]
-    r"""file data"""
-
     filename: Annotated[
         Optional[str],
         FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
@@ -6,8 +6,7 @@ from cribl_control_plane._hooks import HookContext
 from cribl_control_plane.types import OptionalNullable, UNSET
 from cribl_control_plane.utils import get_security_from_env
 from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
-import io
-from typing import Any, IO, List, Mapping, Optional, Union
+from typing import Any, List, Mapping, Optional, Union


 class Packs(BaseSDK):
@@ -452,8 +451,6 @@ class Packs(BaseSDK):
     def update_packs(
         self,
         *,
-        size: int,
-        request_body: Union[bytes, IO[bytes], io.BufferedReader],
         filename: Optional[str] = None,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
@@ -464,8 +461,6 @@

        Upload Pack

-        :param size: Size of the pack file in bytes
-        :param request_body: file data
        :param filename: the file to upload
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
@@ -484,8 +479,6 @@

         request = models.UpdatePacksRequest(
             filename=filename,
-            size=size,
-            request_body=request_body,
         )

         req = self._build_request(
@@ -494,20 +487,13 @@
             base_url=base_url,
             url_variables=url_variables,
             request=request,
-            request_body_required=True,
+            request_body_required=False,
             request_has_path_params=False,
             request_has_query_params=True,
             user_agent_header="user-agent",
             accept_header_value="application/json",
             http_headers=http_headers,
             security=self.sdk_configuration.security,
-            get_serialized_body=lambda: utils.serialize_request_body(
-                request.request_body,
-                False,
-                False,
-                "raw",
-                Union[bytes, IO[bytes], io.BufferedReader],
-            ),
             timeout_ms=timeout_ms,
         )
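After this change the sync upload method carries no request body at all: `size` and `request_body` are gone from the signature, the request model, and the serialized body, and `request_body_required` flips to `False`. The 0.0.22 call shape reduces to the optional `filename` query parameter; a sketch (the filename value is illustrative):

```python
import os

from cribl_control_plane import CriblControlPlane, models

with CriblControlPlane(
    server_url="https://api.example.com",
    security=models.Security(
        bearer_auth=os.getenv("CRIBLCONTROLPLANE_BEARER_AUTH", ""),
    ),
) as ccp_client:
    # 0.0.22 sends no file body on this operation; only the optional
    # `filename` query parameter remains.
    res = ccp_client.packs.update_packs(filename="example.crbl")
    print(res)
```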
@@ -552,8 +538,6 @@
     async def update_packs_async(
         self,
         *,
-        size: int,
-        request_body: Union[bytes, IO[bytes], io.BufferedReader],
         filename: Optional[str] = None,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
@@ -564,8 +548,6 @@

        Upload Pack

-        :param size: Size of the pack file in bytes
-        :param request_body: file data
        :param filename: the file to upload
        :param retries: Override the default retry configuration for this method
        :param server_url: Override the default server URL for this method
@@ -584,8 +566,6 @@

         request = models.UpdatePacksRequest(
             filename=filename,
-            size=size,
-            request_body=request_body,
         )

         req = self._build_request_async(
@@ -594,20 +574,13 @@
             base_url=base_url,
             url_variables=url_variables,
             request=request,
-            request_body_required=True,
+            request_body_required=False,
             request_has_path_params=False,
             request_has_query_params=True,
             user_agent_header="user-agent",
             accept_header_value="application/json",
             http_headers=http_headers,
             security=self.sdk_configuration.security,
-            get_serialized_body=lambda: utils.serialize_request_body(
-                request.request_body,
-                False,
-                False,
-                "raw",
-                Union[bytes, IO[bytes], io.BufferedReader],
-            ),
             timeout_ms=timeout_ms,
         )
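The async variant changes in lockstep; a matching sketch:

```python
import asyncio
import os

from cribl_control_plane import CriblControlPlane, models


async def main():
    async with CriblControlPlane(
        server_url="https://api.example.com",
        security=models.Security(
            bearer_auth=os.getenv("CRIBLCONTROLPLANE_BEARER_AUTH", ""),
        ),
    ) as ccp_client:
        # Same reduced surface as the sync method: no size, no request_body.
        res = await ccp_client.packs.update_packs_async(filename="example.crbl")
        print(res)

asyncio.run(main())
```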
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: cribl-control-plane
-Version: 0.0.21a1
+Version: 0.0.22
 Summary: Python Client SDK Generated by Speakeasy.
 Author: Speakeasy
 Requires-Python: >=3.9.2
@@ -31,7 +31,6 @@ Cribl API Reference: This API Reference lists available REST endpoints, along wi
 * [SDK Example Usage](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#sdk-example-usage)
 * [Authentication](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#authentication)
 * [Available Resources and Operations](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#available-resources-and-operations)
-* [File uploads](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#file-uploads)
 * [Retries](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#retries)
 * [Error Handling](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#error-handling)
 * [Custom HTTP Client](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/#custom-http-client)
@@ -126,7 +125,40 @@ with CriblControlPlane(
     ),
 ) as ccp_client:

-    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>")
+    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>", accelerated_fields=[
+        "<value 1>",
+        "<value 2>",
+    ], bucket_name="<value>", cache_connection={
+        "accelerated_fields": [
+            "<value 1>",
+            "<value 2>",
+        ],
+        "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+        "cache_ref": "<value>",
+        "created_at": 7795.06,
+        "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+        "migration_query_id": "<id>",
+        "retention_in_days": 1466.58,
+    }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+        "datatypes": [
+            "<value 1>",
+        ],
+        "metadata": {
+            "earliest": "<value>",
+            "enable_acceleration": True,
+            "field_list": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "latest_run_info": {
+                "earliest_scanned_time": 4334.7,
+                "finished_at": 6811.22,
+                "latest_scanned_time": 5303.3,
+                "object_count": 9489.04,
+            },
+            "scan_mode": models.ScanMode.DETAILED,
+        },
+    }, storage_location_id="<id>", view_name="<value>")

     # Handle response
     print(res)
@@ -150,7 +182,40 @@
         ),
     ) as ccp_client:

-        res = await ccp_client.lake.create_cribl_lake_dataset_by_lake_id_async(lake_id="<id>", id="<id>")
+        res = await ccp_client.lake.create_cribl_lake_dataset_by_lake_id_async(lake_id="<id>", id="<id>", accelerated_fields=[
+            "<value 1>",
+            "<value 2>",
+        ], bucket_name="<value>", cache_connection={
+            "accelerated_fields": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+            "cache_ref": "<value>",
+            "created_at": 7795.06,
+            "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+            "migration_query_id": "<id>",
+            "retention_in_days": 1466.58,
+        }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+            "datatypes": [
+                "<value 1>",
+            ],
+            "metadata": {
+                "earliest": "<value>",
+                "enable_acceleration": True,
+                "field_list": [
+                    "<value 1>",
+                    "<value 2>",
+                ],
+                "latest_run_info": {
+                    "earliest_scanned_time": 4334.7,
+                    "finished_at": 6811.22,
+                    "latest_scanned_time": 5303.3,
+                    "object_count": 9489.04,
+                },
+                "scan_mode": models.ScanMode.DETAILED,
+            },
+        }, storage_location_id="<id>", view_name="<value>")

         # Handle response
         print(res)
@@ -184,7 +249,40 @@ with CriblControlPlane(
     ),
 ) as ccp_client:

-    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>")
+    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>", accelerated_fields=[
+        "<value 1>",
+        "<value 2>",
+    ], bucket_name="<value>", cache_connection={
+        "accelerated_fields": [
+            "<value 1>",
+            "<value 2>",
+        ],
+        "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+        "cache_ref": "<value>",
+        "created_at": 7795.06,
+        "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+        "migration_query_id": "<id>",
+        "retention_in_days": 1466.58,
+    }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+        "datatypes": [
+            "<value 1>",
+        ],
+        "metadata": {
+            "earliest": "<value>",
+            "enable_acceleration": True,
+            "field_list": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "latest_run_info": {
+                "earliest_scanned_time": 4334.7,
+                "finished_at": 6811.22,
+                "latest_scanned_time": 5303.3,
+                "object_count": 9489.04,
+            },
+            "scan_mode": models.ScanMode.DETAILED,
+        },
+    }, storage_location_id="<id>", view_name="<value>")

     # Handle response
     print(res)
@@ -224,8 +322,10 @@ with CriblControlPlane(
 * [get_groups_config_version_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#get_groups_config_version_by_id) - Get effective bundle version for given Group
 * [create_products_groups_by_product](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#create_products_groups_by_product) - Create a Fleet or Worker Group
 * [get_products_groups_by_product](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#get_products_groups_by_product) - Get a list of ConfigGroup objects
-* [update_groups_deploy_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#update_groups_deploy_by_id) - Deploy commits for a Fleet or Worker Group
+* [delete_groups_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#delete_groups_by_id) - Delete a Fleet or Worker Group
 * [get_groups_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#get_groups_by_id) - Get a specific ConfigGroup object
+* [update_groups_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#update_groups_by_id) - Update a Fleet or Worker Group
+* [update_groups_deploy_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#update_groups_deploy_by_id) - Deploy commits for a Fleet or Worker Group
 * [get_groups_acl_by_id](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/groupssdk/README.md#get_groups_acl_by_id) - ACL of members with permissions for resources in this Group

 ### [health](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/docs/sdks/health/README.md)
@@ -302,36 +402,6 @@ with CriblControlPlane(
 </details>
 <!-- End Available Resources and Operations [operations] -->

-<!-- Start File uploads [file-upload] -->
-## File uploads
-
-Certain SDK methods accept file objects as part of a request body or multi-part request. It is possible and typically recommended to upload files as a stream rather than reading the entire contents into memory. This avoids excessive memory consumption and potentially crashing with out-of-memory errors when working with very large files. The following example demonstrates how to attach a file stream to a request.
-
-> [!TIP]
->
-> For endpoints that handle file uploads bytes arrays can also be used. However, using streams is recommended for large files.
->
-
-```python
-from cribl_control_plane import CriblControlPlane, models
-import os
-
-
-with CriblControlPlane(
-    server_url="https://api.example.com",
-    security=models.Security(
-        bearer_auth=os.getenv("CRIBLCONTROLPLANE_BEARER_AUTH", ""),
-    ),
-) as ccp_client:
-
-    res = ccp_client.packs.update_packs(size=779474, request_body=open("example.file", "rb"))
-
-    # Handle response
-    print(res)
-
-```
-<!-- End File uploads [file-upload] -->
-
 <!-- Start Retries [retries] -->
 ## Retries

@@ -351,7 +421,40 @@ with CriblControlPlane(
     ),
 ) as ccp_client:

-    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>",
+    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>", accelerated_fields=[
+        "<value 1>",
+        "<value 2>",
+    ], bucket_name="<value>", cache_connection={
+        "accelerated_fields": [
+            "<value 1>",
+            "<value 2>",
+        ],
+        "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+        "cache_ref": "<value>",
+        "created_at": 7795.06,
+        "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+        "migration_query_id": "<id>",
+        "retention_in_days": 1466.58,
+    }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+        "datatypes": [
+            "<value 1>",
+        ],
+        "metadata": {
+            "earliest": "<value>",
+            "enable_acceleration": True,
+            "field_list": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "latest_run_info": {
+                "earliest_scanned_time": 4334.7,
+                "finished_at": 6811.22,
+                "latest_scanned_time": 5303.3,
+                "object_count": 9489.04,
+            },
+            "scan_mode": models.ScanMode.DETAILED,
+        },
+    }, storage_location_id="<id>", view_name="<value>",
         RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False))

     # Handle response
@@ -374,7 +477,40 @@ with CriblControlPlane(
     ),
 ) as ccp_client:

-    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>")
+    res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>", accelerated_fields=[
+        "<value 1>",
+        "<value 2>",
+    ], bucket_name="<value>", cache_connection={
+        "accelerated_fields": [
+            "<value 1>",
+            "<value 2>",
+        ],
+        "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+        "cache_ref": "<value>",
+        "created_at": 7795.06,
+        "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+        "migration_query_id": "<id>",
+        "retention_in_days": 1466.58,
+    }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+        "datatypes": [
+            "<value 1>",
+        ],
+        "metadata": {
+            "earliest": "<value>",
+            "enable_acceleration": True,
+            "field_list": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "latest_run_info": {
+                "earliest_scanned_time": 4334.7,
+                "finished_at": 6811.22,
+                "latest_scanned_time": 5303.3,
+                "object_count": 9489.04,
+            },
+            "scan_mode": models.ScanMode.DETAILED,
+        },
+    }, storage_location_id="<id>", view_name="<value>")

     # Handle response
     print(res)
@@ -411,7 +547,40 @@ with CriblControlPlane(
     res = None
     try:

-        res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>")
+        res = ccp_client.lake.create_cribl_lake_dataset_by_lake_id(lake_id="<id>", id="<id>", accelerated_fields=[
+            "<value 1>",
+            "<value 2>",
+        ], bucket_name="<value>", cache_connection={
+            "accelerated_fields": [
+                "<value 1>",
+                "<value 2>",
+            ],
+            "backfill_status": models.CacheConnectionBackfillStatus.PENDING,
+            "cache_ref": "<value>",
+            "created_at": 7795.06,
+            "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
+            "migration_query_id": "<id>",
+            "retention_in_days": 1466.58,
+        }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+            "datatypes": [
+                "<value 1>",
+            ],
+            "metadata": {
+                "earliest": "<value>",
+                "enable_acceleration": True,
+                "field_list": [
+                    "<value 1>",
+                    "<value 2>",
+                ],
+                "latest_run_info": {
+                    "earliest_scanned_time": 4334.7,
+                    "finished_at": 6811.22,
+                    "latest_scanned_time": 5303.3,
+                    "object_count": 9489.04,
+                },
+                "scan_mode": models.ScanMode.DETAILED,
+            },
+        }, storage_location_id="<id>", view_name="<value>")

         # Handle response
         print(res)
@@ -446,7 +615,7 @@ with CriblControlPlane(


 **Inherit from [`CriblControlPlaneError`](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/./src/cribl_control_plane/errors/criblcontrolplaneerror.py)**:
-* [`HealthStatusError`](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/./src/cribl_control_plane/errors/healthstatuserror.py): Healthy status. Status code `420`. Applicable to 1 of 61 methods.*
+* [`HealthStatusError`](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/./src/cribl_control_plane/errors/healthstatuserror.py): Healthy status. Status code `420`. Applicable to 1 of 63 methods.*
 * [`ResponseValidationError`](https://github.com/criblio/cribl_control_plane_sdk_python/blob/master/./src/cribl_control_plane/errors/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute.

 </details>