cribl-control-plane 0.1.0b2__py3-none-any.whl → 0.2.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cribl_control_plane/_hooks/clientcredentials.py +91 -41
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/errors/apierror.py +1 -1
- cribl_control_plane/errors/criblcontrolplaneerror.py +1 -1
- cribl_control_plane/errors/error.py +1 -1
- cribl_control_plane/errors/healthstatus_error.py +1 -1
- cribl_control_plane/errors/no_response_error.py +1 -1
- cribl_control_plane/errors/responsevalidationerror.py +1 -1
- cribl_control_plane/groups_sdk.py +4 -4
- cribl_control_plane/httpclient.py +0 -1
- cribl_control_plane/lakedatasets.py +12 -12
- cribl_control_plane/models/__init__.py +106 -42
- cribl_control_plane/models/appmode.py +14 -0
- cribl_control_plane/models/configgroup.py +2 -17
- cribl_control_plane/models/cribllakedatasetupdate.py +81 -0
- cribl_control_plane/models/gitinfo.py +14 -3
- cribl_control_plane/models/hbcriblinfo.py +3 -14
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/inputconfluentcloud.py +18 -0
- cribl_control_plane/models/inputkafka.py +17 -0
- cribl_control_plane/models/inputmsk.py +17 -0
- cribl_control_plane/models/inputsqs.py +8 -10
- cribl_control_plane/models/nodeprovidedinfo.py +0 -3
- cribl_control_plane/models/output.py +25 -25
- cribl_control_plane/models/outputchronicle.py +431 -0
- cribl_control_plane/models/outputconfluentcloud.py +18 -0
- cribl_control_plane/models/outputgooglechronicle.py +5 -4
- cribl_control_plane/models/outputgooglecloudlogging.py +9 -4
- cribl_control_plane/models/outputkafka.py +17 -0
- cribl_control_plane/models/outputmsk.py +17 -0
- cribl_control_plane/models/outputsqs.py +8 -10
- cribl_control_plane/models/routecloneconf.py +13 -0
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +9 -5
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/METADATA +1 -8
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/RECORD +37 -34
- cribl_control_plane/models/outputdatabricks.py +0 -282
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/_hooks/clientcredentials.py CHANGED

@@ -21,6 +21,7 @@ class Credentials:
     client_id: str
     client_secret: str
     token_url: str
+    scopes: Optional[List[str]]
     additional_properties: Dict[str, str]

     def __init__(
@@ -28,25 +29,27 @@ class Credentials:
         client_id: str,
         client_secret: str,
         token_url: str,
+        scopes: Optional[List[str]],
         additional_properties: Optional[Dict[str, str]] = None,
     ):
         self.client_id = client_id
         self.client_secret = client_secret
         self.token_url = token_url
+        self.scopes = scopes
         self.additional_properties = additional_properties or {}


 class Session:
     credentials: Credentials
     token: str
-    scopes: Optional[List[str]]
+    scopes: List[str]
     expires_at: Optional[int] = None

     def __init__(
         self,
         credentials: Credentials,
         token: str,
-        scopes: Optional[List[str]],
+        scopes: List[str],
         expires_at: Optional[int] = None,
     ):
         self.credentials = credentials
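The two hunks above extend the credential model: `Credentials` gains an optional `scopes` override, and `Session.scopes` becomes a required `List[str]`. A minimal construction sketch under those definitions (all values are illustrative, not from the SDK):

    creds = Credentials(
        client_id="my-client",        # illustrative
        client_secret="my-secret",    # illustrative
        token_url="https://auth.example.com/oauth2/token",
        scopes=None,  # None means: defer to the scopes each operation declares
    )
    # A Session now always records the concrete scopes its token was granted
    sess = Session(credentials=creds, token="<access-token>", scopes=["read", "write"])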
@@ -57,7 +60,7 @@ class Session:

 class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
     client: HttpClient
-    sessions: Dict[str, Session] = {}
+    sessions: Dict[str, Dict[str, Session]] = {}

     def sdk_init(self, config: SDKConfiguration) -> SDKConfiguration:
         if config.client is None:
@@ -69,8 +72,7 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
     def before_request(
         self, hook_ctx: BeforeRequestContext, request: httpx.Request
     ) -> httpx.Request:
-        if hook_ctx.oauth2_scopes is None:
-            # OAuth2 not in use
+        if self.is_hook_disabled(hook_ctx):
             return request

         credentials = self.get_credentials(hook_ctx)
@@ -81,22 +83,24 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
             credentials.client_id, credentials.client_secret
         )

-        …
-        ):
-            sess = self.do_token_request(
+        scopes = self.get_required_scopes(credentials, hook_ctx)
+        session = self.get_existing_session(session_key, scopes)
+
+        if session is None:
+            # Create new session
+            session = self.do_token_request(
                 hook_ctx,
                 credentials,
-                …
+                scopes,
             )

-        self.sessions[session_key] = sess
+        if session_key not in self.sessions:
+            self.sessions[session_key] = {}
+
+        scope_key = self.get_scope_key(scopes)
+        self.sessions[session_key][scope_key] = session

-        request.headers["Authorization"] = f"Bearer {sess.token}"
+        request.headers["Authorization"] = f"Bearer {session.token}"

         return request

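The cache is now two-level: an outer key derived from the client credentials and an inner key derived from the normalized scope set, so one client can hold separate tokens per scope combination. A lookup sketch against a hook instance (credential values illustrative):

    scopes = ["write", "read"]
    session_key = hook.get_session_key("my-client", "my-secret")  # md5 hex digest
    scope_key = hook.get_scope_key(scopes)                        # "read&write"
    cached = hook.sessions.get(session_key, {}).get(scope_key)    # Session or None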
@@ -106,8 +110,7 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
         response: Optional[httpx.Response],
         error: Optional[Exception],
     ) -> Union[Tuple[Optional[httpx.Response], Optional[Exception]], Exception]:
-        if hook_ctx.oauth2_scopes is None:
-            # OAuth2 not in use
+        if self.is_hook_disabled(hook_ctx):
             return (response, error)

         # We don't want to refresh the token if the error is not related to the token
@@ -122,12 +125,15 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
         session_key = self.get_session_key(
             credentials.client_id, credentials.client_secret
         )
-        …
+        scopes = self.get_required_scopes(credentials, hook_ctx)
+        scope_key = self.get_scope_key(scopes)
+        self.remove_session(session_key, scope_key)

         return (response, error)

+    def is_hook_disabled(self, hook_ctx: HookContext) -> bool:
+        return hook_ctx.oauth2_scopes is None
+
     def get_credentials(self, hook_ctx: HookContext) -> Optional[Credentials]:
         source = hook_ctx.security_source

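On a token-related error, the hook now evicts only the cache entry for this exact client and scope set, so the next request mints a fresh token while unrelated sessions survive. A small sketch of the eviction (keys and sessions are stand-ins):

    sess_rw, sess_plain = object(), object()  # stand-ins for Session objects
    hook.sessions = {"<client-key>": {"read&write": sess_rw, "": sess_plain}}
    hook.remove_session("<client-key>", "read&write")
    # Only the unscoped session remains; once the inner dict empties,
    # remove_session deletes the "<client-key>" entry as well
    assert hook.sessions == {"<client-key>": {"": sess_plain}}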
@@ -145,21 +151,19 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
         # Extract additional properties from security object
         additional_properties = {}
         for key, value in dict(security.client_oauth).items():
-            if key not in ["client_id", "client_secret", "token_url"]:
+            if key not in ["client_id", "client_secret", "token_url", "scopes"]:
                 additional_properties[key] = value

         return Credentials(
             client_id=security.client_oauth.client_id,
             client_secret=security.client_oauth.client_secret,
             token_url=security.client_oauth.token_url,
+            scopes=None,
             additional_properties=additional_properties,
         )

     def do_token_request(
-        self,
-        hook_ctx: HookContext,
-        credentials: Credentials,
-        scopes: Optional[List[str]],
+        self, hook_ctx: HookContext, credentials: Credentials, scopes: List[str]
     ) -> Session:
         payload = {
             "grant_type": "client_credentials",
@@ -167,7 +171,7 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
             "client_secret": credentials.client_secret,
         }

-        if …
+        if len(scopes) > 0:
             payload["scope"] = " ".join(scopes)

         # Add additional properties to payload
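As the hunk above shows, scopes travel in the token request as a single space-delimited `scope` field, which is how the OAuth2 client-credentials grant expects them. The resulting payload looks like this sketch (credential values illustrative):

    payload = {
        "grant_type": "client_credentials",
        "client_id": "my-client",
        "client_secret": "my-secret",
        "scope": " ".join(["read", "write"]),  # -> "read write"
    }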
@@ -203,24 +207,70 @@ class ClientCredentialsHook(SDKInitHook, BeforeRequestHook, AfterErrorHook):
         )

     def get_session_key(self, client_id: str, client_secret: str) -> str:
+        """Generate a consistent session key for the given client ID and secret."""
         return hashlib.md5(f"{client_id}:{client_secret}".encode()).hexdigest()

+    def get_required_scopes(
+        self, credentials: Credentials, hook_ctx: HookContext
+    ) -> List[str]:
+        """Return the list of scopes that need to be requested."""
+        if credentials.scopes is not None:
+            return credentials.scopes
+        return hook_ctx.oauth2_scopes or []
+
+    def get_scope_key(self, scopes: List[str]) -> str:
+        """Generate a consistent scope key for the given scopes."""
+        if not scopes:
+            return ""
+
+        sorted_scopes = sorted(scopes)
+        return "&".join(sorted_scopes)
+
+    def remove_session(self, client_key: str, scope_key: str) -> None:
+        """Remove a session and clean up empty client session maps."""
+        if client_key in self.sessions and scope_key in self.sessions[client_key]:
+            del self.sessions[client_key][scope_key]
+
+            # Clean up empty client sessions
+            if not self.sessions[client_key]:
+                del self.sessions[client_key]
+
+    def get_existing_session(
+        self, client_key: str, required_scopes: List[str]
+    ) -> Optional[Session]:
+        """Find the best session for the required scopes."""
+        if client_key not in self.sessions:
+            return None
+
+        client_sessions = self.sessions[client_key]
+        scope_key = self.get_scope_key(required_scopes)
+
+        if scope_key in client_sessions:
+            exact_match = client_sessions[scope_key]
+            if self.has_token_expired(exact_match.expires_at):
+                self.remove_session(client_key, scope_key)
+            else:
+                return exact_match
+
+        # If no exact match was found, look for a superset match
+        for key, session in client_sessions.items():
+            if self.has_token_expired(session.expires_at):
+                self.remove_session(client_key, key)
+            elif self.has_required_scopes(session.scopes, required_scopes):
+                return session
+
+        return None
+
     def has_required_scopes(
-        self, scopes: Optional[List[str]], required_scopes: List[str]
+        self, scopes: List[str], required_scopes: List[str]
     ) -> bool:
-        if scopes is None:
-            return False
-
+        """Check if all required scopes are present in the given scopes."""
         return all(scope in scopes for scope in required_scopes)

-    def get_scopes(
-        self, required_scopes: List[str], sess: Optional[Session]
-    ) -> List[str]:
-        scopes = required_scopes.copy()
-        if sess is not None and sess.scopes is not None:
-            scopes.extend(sess.scopes)
-        scopes = list(set(scopes))
-        return scopes
-
     def has_token_expired(self, expires_at: Optional[int]) -> bool:
-        …
+        """
+        Check if the token has expired.
+        If no expires_in field was returned by the authorization server, the token is considered to never expire.
+        A 60-second buffer is applied to refresh tokens before they actually expire.
+        """
+        return expires_at is not None and time.time() + 60 >= expires_at
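Two properties of these helpers are worth noting: scope keys are order-insensitive, and expiry is tested with a 60-second safety buffer. A quick sketch, assuming the hook can be instantiated bare (an assumption; the diff does not show a constructor):

    import time

    hook = ClientCredentialsHook()

    # Sorting before joining makes ["write", "read"] and ["read", "write"]
    # hit the same cache entry:
    assert hook.get_scope_key(["write", "read"]) == "read&write"
    assert hook.get_scope_key([]) == ""

    # A token with only 30 seconds left is treated as expired (60 s buffer);
    # a token with no recorded expiry never expires:
    assert hook.has_token_expired(int(time.time()) + 30) is True
    assert hook.has_token_expired(None) is False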
cribl_control_plane/_version.py CHANGED

@@ -3,10 +3,10 @@
 import importlib.metadata

 __title__: str = "cribl-control-plane"
-__version__: str = "0.1.0b2"
-__openapi_doc_version__: str = "4.…"
-__gen_version__: str = "2.…"
-__user_agent__: str = "speakeasy-sdk/python 0.1.0b2 …"
+__version__: str = "0.2.0a1"
+__openapi_doc_version__: str = "4.14.1-alpha.1760352614113-66cf8b7e"
+__gen_version__: str = "2.723.11"
+__user_agent__: str = "speakeasy-sdk/python 0.2.0a1 2.723.11 4.14.1-alpha.1760352614113-66cf8b7e cribl-control-plane"

 try:
     if __package__ is not None:
cribl_control_plane/errors/apierror.py CHANGED

@@ -9,7 +9,7 @@ from cribl_control_plane.errors import CriblControlPlaneError
 MAX_MESSAGE_LEN = 10_000


-@dataclass(…)
+@dataclass(unsafe_hash=True)
 class APIError(CriblControlPlaneError):
     """The fallback error class if no more specific error class is matched."""


cribl_control_plane/errors/responsevalidationerror.py CHANGED

@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from cribl_control_plane.errors import CriblControlPlaneError


-@dataclass(…)
+@dataclass(unsafe_hash=True)
 class ResponseValidationError(CriblControlPlaneError):
     """Error raised when there is a type mismatch between the response data and the expected Pydantic model."""

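Background on the decorator change: when a dataclass generates `__eq__`, Python sets `__hash__` to None, so `unsafe_hash=True` is what keeps these error instances hashable. A standalone sketch of the effect, using a demo class rather than the SDK's own:

    from dataclasses import dataclass

    @dataclass(unsafe_hash=True)
    class DemoError(Exception):
        message: str

    # Hashable despite the generated __eq__, so instances can live in sets:
    assert len({DemoError("boom"), DemoError("boom")}) == 1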
cribl_control_plane/groups_sdk.py CHANGED

@@ -224,7 +224,7 @@ class GroupsSDK(BaseSDK):
         config_version: Optional[str] = None,
         deploying_worker_count: Optional[float] = None,
         description: Optional[str] = None,
-        estimated_ingest_rate: Optional[…] = None,
+        estimated_ingest_rate: Optional[float] = None,
         git: Optional[Union[models.Git, models.GitTypedDict]] = None,
         incompatible_worker_count: Optional[float] = None,
         inherits: Optional[str] = None,
@@ -395,7 +395,7 @@ class GroupsSDK(BaseSDK):
         config_version: Optional[str] = None,
         deploying_worker_count: Optional[float] = None,
         description: Optional[str] = None,
-        estimated_ingest_rate: Optional[…] = None,
+        estimated_ingest_rate: Optional[float] = None,
         git: Optional[Union[models.Git, models.GitTypedDict]] = None,
         incompatible_worker_count: Optional[float] = None,
         inherits: Optional[str] = None,
@@ -757,7 +757,7 @@ class GroupsSDK(BaseSDK):
         config_version: Optional[str] = None,
         deploying_worker_count: Optional[float] = None,
         description: Optional[str] = None,
-        estimated_ingest_rate: Optional[…] = None,
+        estimated_ingest_rate: Optional[float] = None,
         git: Optional[Union[models.Git, models.GitTypedDict]] = None,
         incompatible_worker_count: Optional[float] = None,
         inherits: Optional[str] = None,
@@ -931,7 +931,7 @@ class GroupsSDK(BaseSDK):
         config_version: Optional[str] = None,
         deploying_worker_count: Optional[float] = None,
         description: Optional[str] = None,
-        estimated_ingest_rate: Optional[…] = None,
+        estimated_ingest_rate: Optional[float] = None,
         git: Optional[Union[models.Git, models.GitTypedDict]] = None,
         incompatible_worker_count: Optional[float] = None,
         inherits: Optional[str] = None,
cribl_control_plane/lakedatasets.py CHANGED

@@ -849,7 +849,6 @@ class LakeDatasets(BaseSDK):
         *,
         lake_id: str,
         id_param: str,
-        id: str,
         accelerated_fields: Optional[List[str]] = None,
         bucket_name: Optional[str] = None,
         cache_connection: Optional[
@@ -857,8 +856,9 @@ class LakeDatasets(BaseSDK):
         ] = None,
         deletion_started_at: Optional[float] = None,
         description: Optional[str] = None,
-        format_: Optional[models.…] = None,
+        format_: Optional[models.CriblLakeDatasetUpdateFormat] = None,
         http_da_used: Optional[bool] = None,
+        id: Optional[str] = None,
         retention_period_in_days: Optional[float] = None,
         search_config: Optional[
             Union[
@@ -878,7 +878,6 @@ class LakeDatasets(BaseSDK):

         :param lake_id: The <code>id</code> of the Lake that contains the Lake Dataset to update.
         :param id_param: The <code>id</code> of the Lake Dataset to update.
-        :param id:
         :param accelerated_fields:
         :param bucket_name:
         :param cache_connection:
@@ -886,6 +885,7 @@ class LakeDatasets(BaseSDK):
         :param description:
         :param format_:
         :param http_da_used:
+        :param id:
         :param retention_period_in_days:
         :param search_config:
         :param storage_location_id:
@@ -908,7 +908,7 @@ class LakeDatasets(BaseSDK):
         request = models.UpdateCriblLakeDatasetByLakeIDAndIDRequest(
             lake_id=lake_id,
             id_param=id_param,
-            …
+            cribl_lake_dataset_update=models.CriblLakeDatasetUpdate(
                 accelerated_fields=accelerated_fields,
                 bucket_name=bucket_name,
                 cache_connection=utils.get_pydantic_model(
@@ -942,11 +942,11 @@ class LakeDatasets(BaseSDK):
             http_headers=http_headers,
             security=self.sdk_configuration.security,
             get_serialized_body=lambda: utils.serialize_request_body(
-                request.…,
+                request.cribl_lake_dataset_update,
                 False,
                 False,
                 "json",
-                models.…,
+                models.CriblLakeDatasetUpdate,
             ),
             timeout_ms=timeout_ms,
         )
@@ -996,7 +996,6 @@ class LakeDatasets(BaseSDK):
         *,
         lake_id: str,
         id_param: str,
-        id: str,
         accelerated_fields: Optional[List[str]] = None,
         bucket_name: Optional[str] = None,
         cache_connection: Optional[
@@ -1004,8 +1003,9 @@ class LakeDatasets(BaseSDK):
         ] = None,
         deletion_started_at: Optional[float] = None,
         description: Optional[str] = None,
-        format_: Optional[models.…] = None,
+        format_: Optional[models.CriblLakeDatasetUpdateFormat] = None,
         http_da_used: Optional[bool] = None,
+        id: Optional[str] = None,
         retention_period_in_days: Optional[float] = None,
         search_config: Optional[
             Union[
@@ -1025,7 +1025,6 @@ class LakeDatasets(BaseSDK):

         :param lake_id: The <code>id</code> of the Lake that contains the Lake Dataset to update.
         :param id_param: The <code>id</code> of the Lake Dataset to update.
-        :param id:
         :param accelerated_fields:
         :param bucket_name:
         :param cache_connection:
@@ -1033,6 +1032,7 @@ class LakeDatasets(BaseSDK):
         :param description:
         :param format_:
         :param http_da_used:
+        :param id:
         :param retention_period_in_days:
         :param search_config:
         :param storage_location_id:
@@ -1055,7 +1055,7 @@ class LakeDatasets(BaseSDK):
         request = models.UpdateCriblLakeDatasetByLakeIDAndIDRequest(
             lake_id=lake_id,
             id_param=id_param,
-            …
+            cribl_lake_dataset_update=models.CriblLakeDatasetUpdate(
                 accelerated_fields=accelerated_fields,
                 bucket_name=bucket_name,
                 cache_connection=utils.get_pydantic_model(
@@ -1089,11 +1089,11 @@ class LakeDatasets(BaseSDK):
             http_headers=http_headers,
             security=self.sdk_configuration.security,
             get_serialized_body=lambda: utils.serialize_request_body(
-                request.…,
+                request.cribl_lake_dataset_update,
                 False,
                 False,
                 "json",
-                models.…,
+                models.CriblLakeDatasetUpdate,
             ),
             timeout_ms=timeout_ms,
         )
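Net effect for callers of the Lake Dataset update: `id` moves to an optional keyword and the request body becomes the new `CriblLakeDatasetUpdate` model. A sketch of building the request under the new shapes (IDs and values are illustrative):

    request = models.UpdateCriblLakeDatasetByLakeIDAndIDRequest(
        lake_id="default",
        id_param="my_dataset",
        cribl_lake_dataset_update=models.CriblLakeDatasetUpdate(
            retention_period_in_days=30,
            # `id` may now be omitted entirely
        ),
    )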