cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/errors/__init__.py +5 -8
- cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
- cribl_control_plane/groups_sdk.py +28 -52
- cribl_control_plane/health.py +16 -22
- cribl_control_plane/models/__init__.py +54 -217
- cribl_control_plane/models/appmode.py +14 -0
- cribl_control_plane/models/authtoken.py +1 -5
- cribl_control_plane/models/cacheconnection.py +0 -20
- cribl_control_plane/models/configgroup.py +7 -55
- cribl_control_plane/models/configgroupcloud.py +1 -11
- cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
- cribl_control_plane/models/createroutesappendbyidop.py +2 -2
- cribl_control_plane/models/createversionundoop.py +3 -3
- cribl_control_plane/models/cribllakedataset.py +1 -11
- cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
- cribl_control_plane/models/datasetmetadata.py +1 -11
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
- cribl_control_plane/models/distributedsummary.py +0 -6
- cribl_control_plane/models/error.py +16 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
- cribl_control_plane/models/gethealthinfoop.py +17 -0
- cribl_control_plane/models/getsummaryop.py +0 -11
- cribl_control_plane/models/hbcriblinfo.py +3 -24
- cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/input.py +78 -80
- cribl_control_plane/models/inputappscope.py +17 -80
- cribl_control_plane/models/inputazureblob.py +1 -33
- cribl_control_plane/models/inputcollection.py +1 -24
- cribl_control_plane/models/inputconfluentcloud.py +18 -195
- cribl_control_plane/models/inputcribl.py +1 -24
- cribl_control_plane/models/inputcriblhttp.py +17 -62
- cribl_control_plane/models/inputcribllakehttp.py +17 -62
- cribl_control_plane/models/inputcriblmetrics.py +1 -24
- cribl_control_plane/models/inputcribltcp.py +17 -62
- cribl_control_plane/models/inputcrowdstrike.py +1 -54
- cribl_control_plane/models/inputdatadogagent.py +17 -62
- cribl_control_plane/models/inputdatagen.py +1 -24
- cribl_control_plane/models/inputedgeprometheus.py +34 -147
- cribl_control_plane/models/inputelastic.py +27 -119
- cribl_control_plane/models/inputeventhub.py +1 -182
- cribl_control_plane/models/inputexec.py +1 -33
- cribl_control_plane/models/inputfile.py +3 -42
- cribl_control_plane/models/inputfirehose.py +17 -62
- cribl_control_plane/models/inputgooglepubsub.py +1 -36
- cribl_control_plane/models/inputgrafana.py +32 -157
- cribl_control_plane/models/inputhttp.py +17 -62
- cribl_control_plane/models/inputhttpraw.py +17 -62
- cribl_control_plane/models/inputjournalfiles.py +1 -24
- cribl_control_plane/models/inputkafka.py +17 -189
- cribl_control_plane/models/inputkinesis.py +1 -80
- cribl_control_plane/models/inputkubeevents.py +1 -24
- cribl_control_plane/models/inputkubelogs.py +1 -33
- cribl_control_plane/models/inputkubemetrics.py +1 -33
- cribl_control_plane/models/inputloki.py +17 -71
- cribl_control_plane/models/inputmetrics.py +17 -62
- cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
- cribl_control_plane/models/inputmsk.py +18 -81
- cribl_control_plane/models/inputnetflow.py +1 -24
- cribl_control_plane/models/inputoffice365mgmt.py +1 -67
- cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
- cribl_control_plane/models/inputoffice365service.py +1 -67
- cribl_control_plane/models/inputopentelemetry.py +16 -92
- cribl_control_plane/models/inputprometheus.py +34 -138
- cribl_control_plane/models/inputprometheusrw.py +17 -71
- cribl_control_plane/models/inputrawudp.py +1 -24
- cribl_control_plane/models/inputs3.py +1 -45
- cribl_control_plane/models/inputs3inventory.py +1 -54
- cribl_control_plane/models/inputsecuritylake.py +1 -54
- cribl_control_plane/models/inputsnmp.py +1 -40
- cribl_control_plane/models/inputsplunk.py +17 -85
- cribl_control_plane/models/inputsplunkhec.py +16 -70
- cribl_control_plane/models/inputsplunksearch.py +1 -63
- cribl_control_plane/models/inputsqs.py +1 -56
- cribl_control_plane/models/inputsyslog.py +32 -121
- cribl_control_plane/models/inputsystemmetrics.py +9 -142
- cribl_control_plane/models/inputsystemstate.py +1 -33
- cribl_control_plane/models/inputtcp.py +17 -81
- cribl_control_plane/models/inputtcpjson.py +17 -71
- cribl_control_plane/models/inputwef.py +1 -71
- cribl_control_plane/models/inputwindowsmetrics.py +9 -129
- cribl_control_plane/models/inputwineventlogs.py +1 -60
- cribl_control_plane/models/inputwiz.py +1 -45
- cribl_control_plane/models/inputwizwebhook.py +17 -62
- cribl_control_plane/models/inputzscalerhec.py +16 -70
- cribl_control_plane/models/jobinfo.py +1 -4
- cribl_control_plane/models/jobstatus.py +3 -34
- cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
- cribl_control_plane/models/logininfo.py +3 -3
- cribl_control_plane/models/masterworkerentry.py +1 -11
- cribl_control_plane/models/nodeprovidedinfo.py +1 -11
- cribl_control_plane/models/nodeupgradestatus.py +0 -38
- cribl_control_plane/models/output.py +88 -93
- cribl_control_plane/models/outputazureblob.py +1 -110
- cribl_control_plane/models/outputazuredataexplorer.py +87 -452
- cribl_control_plane/models/outputazureeventhub.py +19 -281
- cribl_control_plane/models/outputazurelogs.py +19 -115
- cribl_control_plane/models/outputchronicle.py +19 -115
- cribl_control_plane/models/outputclickhouse.py +19 -155
- cribl_control_plane/models/outputcloudwatch.py +19 -106
- cribl_control_plane/models/outputconfluentcloud.py +38 -311
- cribl_control_plane/models/outputcriblhttp.py +19 -135
- cribl_control_plane/models/outputcribllake.py +1 -97
- cribl_control_plane/models/outputcribltcp.py +19 -132
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
- cribl_control_plane/models/outputdatadog.py +19 -159
- cribl_control_plane/models/outputdataset.py +19 -143
- cribl_control_plane/models/outputdiskspool.py +1 -11
- cribl_control_plane/models/outputdls3.py +1 -152
- cribl_control_plane/models/outputdynatracehttp.py +19 -160
- cribl_control_plane/models/outputdynatraceotlp.py +19 -160
- cribl_control_plane/models/outputelastic.py +19 -163
- cribl_control_plane/models/outputelasticcloud.py +19 -140
- cribl_control_plane/models/outputexabeam.py +1 -61
- cribl_control_plane/models/outputfilesystem.py +1 -87
- cribl_control_plane/models/outputgooglechronicle.py +20 -166
- cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
- cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
- cribl_control_plane/models/outputgooglepubsub.py +19 -106
- cribl_control_plane/models/outputgrafanacloud.py +37 -288
- cribl_control_plane/models/outputgraphite.py +19 -105
- cribl_control_plane/models/outputhoneycomb.py +19 -115
- cribl_control_plane/models/outputhumiohec.py +19 -126
- cribl_control_plane/models/outputinfluxdb.py +19 -130
- cribl_control_plane/models/outputkafka.py +34 -302
- cribl_control_plane/models/outputkinesis.py +19 -133
- cribl_control_plane/models/outputloki.py +17 -129
- cribl_control_plane/models/outputminio.py +1 -145
- cribl_control_plane/models/outputmsk.py +34 -193
- cribl_control_plane/models/outputnewrelic.py +19 -136
- cribl_control_plane/models/outputnewrelicevents.py +20 -128
- cribl_control_plane/models/outputopentelemetry.py +19 -178
- cribl_control_plane/models/outputprometheus.py +19 -115
- cribl_control_plane/models/outputring.py +1 -31
- cribl_control_plane/models/outputs3.py +1 -152
- cribl_control_plane/models/outputsecuritylake.py +1 -114
- cribl_control_plane/models/outputsentinel.py +19 -135
- cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
- cribl_control_plane/models/outputservicenow.py +19 -168
- cribl_control_plane/models/outputsignalfx.py +19 -115
- cribl_control_plane/models/outputsns.py +17 -113
- cribl_control_plane/models/outputsplunk.py +19 -153
- cribl_control_plane/models/outputsplunkhec.py +19 -208
- cribl_control_plane/models/outputsplunklb.py +19 -182
- cribl_control_plane/models/outputsqs.py +17 -124
- cribl_control_plane/models/outputstatsd.py +19 -105
- cribl_control_plane/models/outputstatsdext.py +19 -105
- cribl_control_plane/models/outputsumologic.py +19 -117
- cribl_control_plane/models/outputsyslog.py +96 -259
- cribl_control_plane/models/outputtcpjson.py +19 -141
- cribl_control_plane/models/outputwavefront.py +19 -115
- cribl_control_plane/models/outputwebhook.py +19 -161
- cribl_control_plane/models/outputxsiam.py +17 -113
- cribl_control_plane/models/packinfo.py +5 -8
- cribl_control_plane/models/packinstallinfo.py +5 -8
- cribl_control_plane/models/resourcepolicy.py +0 -11
- cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/runnablejobcollection.py +9 -72
- cribl_control_plane/models/runnablejobexecutor.py +9 -32
- cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
- cribl_control_plane/packs.py +7 -202
- cribl_control_plane/routes_sdk.py +6 -6
- cribl_control_plane/tokens.py +15 -23
- {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
- cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
- cribl_control_plane/models/groupcreaterequest.py +0 -171
- cribl_control_plane/models/outpostnodeinfo.py +0 -16
- cribl_control_plane/models/outputdatabricks.py +0 -482
- cribl_control_plane/models/updatepacksop.py +0 -25
- cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
- {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
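Nearly all of the large minus counts above come from one repeated change, visible in the hunks below: the generated models drop their per-field `field_serializer` fallbacks (and with them the `models` import) and rely solely on the open-enum validation already attached through `PlainValidator(validate_open_enum(...))`. A minimal, self-contained sketch of that open-enum pattern; `Compression`, `Pq`, and this `validate_open_enum` are simplified stand-ins for illustration, not the library's actual implementations:

from enum import Enum
from typing import Optional

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def validate_open_enum(strict: bool):
    # Stand-in for cribl_control_plane.utils.validate_open_enum: accept
    # known values as enum members, pass unknown strings through unchanged.
    # The `strict` flag is ignored in this simplified version.
    def validator(value):
        try:
            return Compression(value)
        except ValueError:
            return value

    return validator


class Pq(BaseModel):
    compress: Annotated[
        Optional[Compression], PlainValidator(validate_open_enum(False))
    ] = Compression.NONE


print(Pq(compress="gzip").compress)  # Compression.GZIP
print(Pq(compress="zstd").compress)  # 'zstd' is kept as a plain string

Because `PlainValidator` replaces the field's normal validation, an unknown string is stored as-is rather than rejected, which is presumably the point of the open-enum design: payloads from newer server versions still deserialize.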
cribl_control_plane/models/inputconfluentcloud.py +18 -195

@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputConfluentCloudConnection(BaseModel):
 class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -108,24 +103,6 @@ class InputConfluentCloudPq(BaseModel):
         Optional[InputConfluentCloudPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
@@ -209,23 +186,12 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
+
+class InputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
 
 
 class InputConfluentCloudAuthTypedDict(TypedDict):
@@ -333,33 +299,13 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
-                    value
-                )
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
-                    value
-                )
-            except ValueError:
-                return value
-        return value
-
 
 class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -381,6 +327,15 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudSchemaType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = InputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -400,76 +355,20 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputConfluentCloudAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Enter credentials directly, or select a stored secret"""
-
-    MANUAL = "manual"
-    SECRET = "secret"
-
-
 class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
-    # PLAIN
     PLAIN = "plain"
-    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
-    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
-    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
-class InputConfluentCloudOauthParamTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class InputConfluentCloudOauthParam(BaseModel):
-    name: str
-
-    value: str
-
-
-class InputConfluentCloudSaslExtensionTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class InputConfluentCloudSaslExtension(BaseModel):
-    name: str
-
-    value: str
-
-
 class InputConfluentCloudAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
-    username: NotRequired[str]
-    password: NotRequired[str]
-    auth_type: NotRequired[InputConfluentCloudAuthenticationMethod]
-    r"""Enter credentials directly, or select a stored secret"""
-    credentials_secret: NotRequired[str]
-    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[InputConfluentCloudSASLMechanism]
-    keytab_location: NotRequired[str]
-    r"""Location of keytab file for authentication principal"""
-    principal: NotRequired[str]
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-    broker_service_class: NotRequired[str]
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
     oauth_enabled: NotRequired[bool]
     r"""Enable OAuth authentication"""
-    token_url: NotRequired[str]
-    r"""URL of the token endpoint to use for OAuth authentication"""
-    client_id: NotRequired[str]
-    r"""Client ID to use for OAuth authentication"""
-    oauth_secret_type: NotRequired[str]
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
-    oauth_params: NotRequired[List[InputConfluentCloudOauthParamTypedDict]]
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-    sasl_extensions: NotRequired[List[InputConfluentCloudSaslExtensionTypedDict]]
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class InputConfluentCloudAuthentication(BaseModel):
@@ -477,92 +376,16 @@ class InputConfluentCloudAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    username: Optional[str] = None
-
-    password: Optional[str] = None
-
-    auth_type: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
-    ] = InputConfluentCloudAuthenticationMethod.MANUAL
-    r"""Enter credentials directly, or select a stored secret"""
-
-    credentials_secret: Annotated[
-        Optional[str], pydantic.Field(alias="credentialsSecret")
-    ] = None
-    r"""Select or create a secret that references your credentials"""
-
     mechanism: Annotated[
         Optional[InputConfluentCloudSASLMechanism],
         PlainValidator(validate_open_enum(False)),
    ] = InputConfluentCloudSASLMechanism.PLAIN
 
-    keytab_location: Annotated[
-        Optional[str], pydantic.Field(alias="keytabLocation")
-    ] = None
-    r"""Location of keytab file for authentication principal"""
-
-    principal: Optional[str] = None
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-
-    broker_service_class: Annotated[
-        Optional[str], pydantic.Field(alias="brokerServiceClass")
-    ] = None
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
-
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
     )
     r"""Enable OAuth authentication"""
 
-    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
-    r"""URL of the token endpoint to use for OAuth authentication"""
-
-    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
-    r"""Client ID to use for OAuth authentication"""
-
-    oauth_secret_type: Annotated[
-        Optional[str], pydantic.Field(alias="oauthSecretType")
-    ] = "secret"
-
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
-    ] = None
-    r"""Select or create a stored text secret"""
-
-    oauth_params: Annotated[
-        Optional[List[InputConfluentCloudOauthParam]],
-        pydantic.Field(alias="oauthParams"),
-    ] = None
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-
-    sasl_extensions: Annotated[
-        Optional[List[InputConfluentCloudSaslExtension]],
-        pydantic.Field(alias="saslExtensions"),
-    ] = None
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("mechanism")
-    def serialize_mechanism(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputConfluentCloudSASLMechanism(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputConfluentCloudMetadatumTypedDict(TypedDict):
     name: str
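The only additions in this file are the `InputConfluentCloudSchemaType` open enum and the `schemaType` field on the Schema Registry authentication models. A hypothetical usage sketch; it assumes these classes are re-exported from `cribl_control_plane.models` and that the generated `BaseModel` accepts snake_case field names, as Speakeasy-generated models typically do:

from cribl_control_plane.models import (
    InputConfluentCloudKafkaSchemaRegistryAuthentication,
    InputConfluentCloudSchemaType,
)

# schema_type is new in 0.3.0a1; the generated default is AVRO.
auth = InputConfluentCloudKafkaSchemaRegistryAuthentication(
    schema_registry_url="http://localhost:8081",
    schema_type=InputConfluentCloudSchemaType.JSON,
)
print(auth.model_dump(by_alias=True)["schemaType"])  # "json"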
cribl_control_plane/models/inputcribl.py +1 -24

@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputCriblConnection(BaseModel):
 class InputCriblMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputCriblCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,24 +102,6 @@ class InputCriblPq(BaseModel):
         Optional[InputCriblPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputCriblMetadatumTypedDict(TypedDict):
     name: str
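The serializer methods removed here (and in every other model touched by this release) all follow a single template, distilled below into a standalone function for reference; `coerce_to_enum` is an illustrative name, not a library helper. With the serializers gone, whatever value the open-enum validator stored is presumably dumped as-is.

def coerce_to_enum(enum_cls, value):
    # Template shared by all the removed serialize_* methods: turn a known
    # string back into its enum member at dump time; leave unknown strings
    # and non-strings untouched.
    if isinstance(value, str):
        try:
            return enum_cls(value)
        except ValueError:
            return value
    return value

# e.g. with the Compression stand-in from the earlier sketch:
# coerce_to_enum(Compression, "gzip") -> Compression.GZIP
# coerce_to_enum(Compression, "zstd") -> "zstd"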
cribl_control_plane/models/inputcriblhttp.py +17 -62

@@ -1,14 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
-from typing import List, Optional
+from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
@@ -30,18 +29,14 @@ class InputCriblHTTPConnection(BaseModel):
 class InputCriblHTTPMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputCriblHTTPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,24 +102,6 @@ class InputCriblHTTPPq(BaseModel):
         Optional[InputCriblHTTPPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblHTTPMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblHTTPCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputCriblHTTPMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
@@ -142,12 +119,6 @@ class InputCriblHTTPMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
 
 class InputCriblHTTPTLSSettingsServerSideTypedDict(TypedDict):
     disabled: NotRequired[bool]
-    request_cert: NotRequired[bool]
-    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
-    reject_unauthorized: NotRequired[bool]
-    r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
-    common_name_regex: NotRequired[str]
-    r"""Regex matching allowable common names in peer certificates' subject attribute"""
     certificate_name: NotRequired[str]
     r"""The name of the predefined certificate"""
     priv_key_path: NotRequired[str]
@@ -158,6 +129,10 @@ class InputCriblHTTPTLSSettingsServerSideTypedDict(TypedDict):
     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
     ca_path: NotRequired[str]
     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
     min_version: NotRequired[InputCriblHTTPMinimumTLSVersion]
     max_version: NotRequired[InputCriblHTTPMaximumTLSVersion]
 
@@ -165,19 +140,6 @@ class InputCriblHTTPTLSSettingsServerSideTypedDict(TypedDict):
 class InputCriblHTTPTLSSettingsServerSide(BaseModel):
     disabled: Optional[bool] = True
 
-    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
-    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
-
-    reject_unauthorized: Annotated[
-        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
-    ] = True
-    r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
-
-    common_name_regex: Annotated[
-        Optional[str], pydantic.Field(alias="commonNameRegex")
-    ] = "/.*/"
-    r"""Regex matching allowable common names in peer certificates' subject attribute"""
-
     certificate_name: Annotated[
         Optional[str], pydantic.Field(alias="certificateName")
     ] = None
@@ -195,6 +157,17 @@ class InputCriblHTTPTLSSettingsServerSide(BaseModel):
     ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
 
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
     min_version: Annotated[
         Annotated[
             Optional[InputCriblHTTPMinimumTLSVersion],
@@ -211,24 +184,6 @@ class InputCriblHTTPTLSSettingsServerSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblHTTPMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblHTTPMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputCriblHTTPMetadatumTypedDict(TypedDict):
     name: str
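Besides moving to the end of the TLS settings, `rejectUnauthorized` and `commonNameRegex` are retyped from `bool` (default `True`) and `str` (default `"/.*/"`) to untyped `Any` fields defaulting to `None`, so 0.3.0a1 no longer validates or defaults them. A hypothetical construction sketch, under the same re-export and snake_case-naming assumptions as above:

from cribl_control_plane.models import InputCriblHTTPTLSSettingsServerSide

tls = InputCriblHTTPTLSSettingsServerSide(
    disabled=False,
    request_cert=True,
    reject_unauthorized=True,       # any value is accepted now, not just bool
    common_name_regex="/^myhost/",  # no implicit "/.*/" default anymore
)
print(tls.model_dump(by_alias=True))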
cribl_control_plane/models/inputcribllakehttp.py +17 -62

@@ -1,14 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
-from typing import List, Optional
+from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
@@ -30,18 +29,14 @@ class InputCriblLakeHTTPConnection(BaseModel):
 class InputCriblLakeHTTPMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputCriblLakeHTTPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -108,24 +103,6 @@ class InputCriblLakeHTTPPq(BaseModel):
         Optional[InputCriblLakeHTTPPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblLakeHTTPMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblLakeHTTPCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputCriblLakeHTTPMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
@@ -143,12 +120,6 @@ class InputCriblLakeHTTPMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
 
 class InputCriblLakeHTTPTLSSettingsServerSideTypedDict(TypedDict):
     disabled: NotRequired[bool]
-    request_cert: NotRequired[bool]
-    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
-    reject_unauthorized: NotRequired[bool]
-    r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
-    common_name_regex: NotRequired[str]
-    r"""Regex matching allowable common names in peer certificates' subject attribute"""
     certificate_name: NotRequired[str]
     r"""The name of the predefined certificate"""
     priv_key_path: NotRequired[str]
@@ -159,6 +130,10 @@ class InputCriblLakeHTTPTLSSettingsServerSideTypedDict(TypedDict):
     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
     ca_path: NotRequired[str]
     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
     min_version: NotRequired[InputCriblLakeHTTPMinimumTLSVersion]
     max_version: NotRequired[InputCriblLakeHTTPMaximumTLSVersion]
 
@@ -166,19 +141,6 @@ class InputCriblLakeHTTPTLSSettingsServerSideTypedDict(TypedDict):
 class InputCriblLakeHTTPTLSSettingsServerSide(BaseModel):
     disabled: Optional[bool] = True
 
-    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
-    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
-
-    reject_unauthorized: Annotated[
-        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
-    ] = True
-    r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
-
-    common_name_regex: Annotated[
-        Optional[str], pydantic.Field(alias="commonNameRegex")
-    ] = "/.*/"
-    r"""Regex matching allowable common names in peer certificates' subject attribute"""
-
     certificate_name: Annotated[
         Optional[str], pydantic.Field(alias="certificateName")
     ] = None
@@ -196,6 +158,17 @@ class InputCriblLakeHTTPTLSSettingsServerSide(BaseModel):
     ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
 
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
     min_version: Annotated[
         Annotated[
             Optional[InputCriblLakeHTTPMinimumTLSVersion],
@@ -212,24 +185,6 @@ class InputCriblLakeHTTPTLSSettingsServerSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblLakeHTTPMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputCriblLakeHTTPMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputCriblLakeHTTPMetadatumTypedDict(TypedDict):
     name: str