cribl-control-plane 0.0.50rc2__py3-none-any.whl → 0.0.52__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_hooks/clientcredentials.py +91 -41
- cribl_control_plane/_version.py +6 -4
- cribl_control_plane/errors/apierror.py +1 -1
- cribl_control_plane/errors/criblcontrolplaneerror.py +1 -1
- cribl_control_plane/errors/error.py +1 -1
- cribl_control_plane/errors/healthstatus_error.py +3 -9
- cribl_control_plane/errors/no_response_error.py +1 -1
- cribl_control_plane/errors/responsevalidationerror.py +1 -1
- cribl_control_plane/groups_sdk.py +4 -4
- cribl_control_plane/health.py +2 -6
- cribl_control_plane/models/__init__.py +31 -56
- cribl_control_plane/models/appmode.py +13 -0
- cribl_control_plane/models/cacheconnection.py +2 -10
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
- cribl_control_plane/models/cloudprovider.py +1 -2
- cribl_control_plane/models/configgroup.py +4 -24
- cribl_control_plane/models/configgroupcloud.py +2 -6
- cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
- cribl_control_plane/models/createinputhectokenbyidop.py +5 -6
- cribl_control_plane/models/createversionpushop.py +5 -5
- cribl_control_plane/models/cribllakedataset.py +2 -8
- cribl_control_plane/models/datasetmetadata.py +2 -8
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
- cribl_control_plane/models/getsummaryop.py +2 -7
- cribl_control_plane/models/getversionshowop.py +5 -6
- cribl_control_plane/models/gitinfo.py +3 -14
- cribl_control_plane/models/hbcriblinfo.py +3 -24
- cribl_control_plane/models/healthstatus.py +4 -7
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/input.py +63 -65
- cribl_control_plane/models/inputappscope.py +14 -34
- cribl_control_plane/models/inputazureblob.py +6 -17
- cribl_control_plane/models/inputcollection.py +4 -11
- cribl_control_plane/models/inputconfluentcloud.py +32 -41
- cribl_control_plane/models/inputcribl.py +4 -11
- cribl_control_plane/models/inputcriblhttp.py +8 -23
- cribl_control_plane/models/inputcribllakehttp.py +10 -22
- cribl_control_plane/models/inputcriblmetrics.py +4 -12
- cribl_control_plane/models/inputcribltcp.py +8 -23
- cribl_control_plane/models/inputcrowdstrike.py +10 -26
- cribl_control_plane/models/inputdatadogagent.py +8 -24
- cribl_control_plane/models/inputdatagen.py +4 -11
- cribl_control_plane/models/inputedgeprometheus.py +24 -58
- cribl_control_plane/models/inputelastic.py +14 -40
- cribl_control_plane/models/inputeventhub.py +6 -15
- cribl_control_plane/models/inputexec.py +6 -14
- cribl_control_plane/models/inputfile.py +6 -15
- cribl_control_plane/models/inputfirehose.py +8 -23
- cribl_control_plane/models/inputgooglepubsub.py +6 -19
- cribl_control_plane/models/inputgrafana.py +24 -67
- cribl_control_plane/models/inputhttp.py +8 -23
- cribl_control_plane/models/inputhttpraw.py +8 -23
- cribl_control_plane/models/inputjournalfiles.py +4 -12
- cribl_control_plane/models/inputkafka.py +28 -41
- cribl_control_plane/models/inputkinesis.py +14 -38
- cribl_control_plane/models/inputkubeevents.py +4 -11
- cribl_control_plane/models/inputkubelogs.py +8 -16
- cribl_control_plane/models/inputkubemetrics.py +8 -16
- cribl_control_plane/models/inputloki.py +10 -29
- cribl_control_plane/models/inputmetrics.py +8 -23
- cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
- cribl_control_plane/models/inputmsk.py +30 -48
- cribl_control_plane/models/inputnetflow.py +4 -11
- cribl_control_plane/models/inputoffice365mgmt.py +14 -33
- cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
- cribl_control_plane/models/inputoffice365service.py +16 -35
- cribl_control_plane/models/inputopentelemetry.py +16 -38
- cribl_control_plane/models/inputprometheus.py +18 -50
- cribl_control_plane/models/inputprometheusrw.py +10 -30
- cribl_control_plane/models/inputrawudp.py +4 -11
- cribl_control_plane/models/inputs3.py +8 -21
- cribl_control_plane/models/inputs3inventory.py +10 -26
- cribl_control_plane/models/inputsecuritylake.py +10 -27
- cribl_control_plane/models/inputsnmp.py +6 -16
- cribl_control_plane/models/inputsplunk.py +12 -33
- cribl_control_plane/models/inputsplunkhec.py +10 -29
- cribl_control_plane/models/inputsplunksearch.py +14 -33
- cribl_control_plane/models/inputsqs.py +10 -27
- cribl_control_plane/models/inputsyslog.py +16 -43
- cribl_control_plane/models/inputsystemmetrics.py +24 -48
- cribl_control_plane/models/inputsystemstate.py +8 -16
- cribl_control_plane/models/inputtcp.py +10 -29
- cribl_control_plane/models/inputtcpjson.py +10 -29
- cribl_control_plane/models/inputwef.py +14 -37
- cribl_control_plane/models/inputwindowsmetrics.py +24 -44
- cribl_control_plane/models/inputwineventlogs.py +10 -20
- cribl_control_plane/models/inputwiz.py +8 -21
- cribl_control_plane/models/inputwizwebhook.py +8 -23
- cribl_control_plane/models/inputzscalerhec.py +10 -29
- cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
- cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
- cribl_control_plane/models/masterworkerentry.py +2 -7
- cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
- cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
- cribl_control_plane/models/nodeprovidedinfo.py +0 -3
- cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
- cribl_control_plane/models/nodeupgradestate.py +1 -2
- cribl_control_plane/models/nodeupgradestatus.py +5 -13
- cribl_control_plane/models/output.py +79 -84
- cribl_control_plane/models/outputazureblob.py +18 -48
- cribl_control_plane/models/outputazuredataexplorer.py +28 -73
- cribl_control_plane/models/outputazureeventhub.py +18 -40
- cribl_control_plane/models/outputazurelogs.py +12 -35
- cribl_control_plane/models/outputclickhouse.py +20 -55
- cribl_control_plane/models/outputcloudwatch.py +10 -29
- cribl_control_plane/models/outputconfluentcloud.py +44 -71
- cribl_control_plane/models/outputcriblhttp.py +16 -44
- cribl_control_plane/models/outputcribllake.py +16 -46
- cribl_control_plane/models/outputcribltcp.py +18 -45
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
- cribl_control_plane/models/outputdatadog.py +20 -48
- cribl_control_plane/models/outputdataset.py +18 -46
- cribl_control_plane/models/outputdiskspool.py +2 -7
- cribl_control_plane/models/outputdls3.py +24 -68
- cribl_control_plane/models/outputdynatracehttp.py +20 -53
- cribl_control_plane/models/outputdynatraceotlp.py +22 -55
- cribl_control_plane/models/outputelastic.py +18 -43
- cribl_control_plane/models/outputelasticcloud.py +12 -36
- cribl_control_plane/models/outputexabeam.py +10 -29
- cribl_control_plane/models/outputfilesystem.py +14 -39
- cribl_control_plane/models/outputgooglechronicle.py +16 -50
- cribl_control_plane/models/outputgooglecloudlogging.py +18 -50
- cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
- cribl_control_plane/models/outputgooglepubsub.py +10 -31
- cribl_control_plane/models/outputgrafanacloud.py +32 -97
- cribl_control_plane/models/outputgraphite.py +14 -31
- cribl_control_plane/models/outputhoneycomb.py +12 -35
- cribl_control_plane/models/outputhumiohec.py +16 -43
- cribl_control_plane/models/outputinfluxdb.py +16 -42
- cribl_control_plane/models/outputkafka.py +40 -69
- cribl_control_plane/models/outputkinesis.py +16 -40
- cribl_control_plane/models/outputloki.py +16 -41
- cribl_control_plane/models/outputminio.py +24 -65
- cribl_control_plane/models/outputmsk.py +42 -77
- cribl_control_plane/models/outputnewrelic.py +18 -43
- cribl_control_plane/models/outputnewrelicevents.py +14 -41
- cribl_control_plane/models/outputopentelemetry.py +26 -67
- cribl_control_plane/models/outputprometheus.py +12 -35
- cribl_control_plane/models/outputring.py +8 -19
- cribl_control_plane/models/outputs3.py +26 -68
- cribl_control_plane/models/outputsecuritylake.py +18 -52
- cribl_control_plane/models/outputsentinel.py +18 -45
- cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
- cribl_control_plane/models/outputservicenow.py +24 -60
- cribl_control_plane/models/outputsignalfx.py +14 -37
- cribl_control_plane/models/outputsns.py +14 -36
- cribl_control_plane/models/outputsplunk.py +24 -60
- cribl_control_plane/models/outputsplunkhec.py +12 -35
- cribl_control_plane/models/outputsplunklb.py +30 -77
- cribl_control_plane/models/outputsqs.py +16 -41
- cribl_control_plane/models/outputstatsd.py +14 -30
- cribl_control_plane/models/outputstatsdext.py +12 -29
- cribl_control_plane/models/outputsumologic.py +12 -35
- cribl_control_plane/models/outputsyslog.py +24 -58
- cribl_control_plane/models/outputtcpjson.py +20 -52
- cribl_control_plane/models/outputwavefront.py +12 -35
- cribl_control_plane/models/outputwebhook.py +22 -58
- cribl_control_plane/models/outputxsiam.py +14 -35
- cribl_control_plane/models/productscore.py +1 -2
- cribl_control_plane/models/rbacresource.py +1 -2
- cribl_control_plane/models/resourcepolicy.py +2 -4
- cribl_control_plane/models/routecloneconf.py +13 -0
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/runnablejobcollection.py +13 -30
- cribl_control_plane/models/runnablejobexecutor.py +4 -13
- cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +5 -6
- cribl_control_plane/models/workertypes.py +1 -2
- {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.52.dist-info}/METADATA +14 -12
- cribl_control_plane-0.0.52.dist-info/RECORD +325 -0
- cribl_control_plane/models/error.py +0 -16
- cribl_control_plane/models/gethealthinfoop.py +0 -17
- cribl_control_plane/models/gitshowresult.py +0 -19
- cribl_control_plane/models/outputdatabricks.py +0 -282
- cribl_control_plane-0.0.50rc2.dist-info/RECORD +0 -327
- {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.52.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputappscope.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputAppscopeConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputAppscopeMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputAppscopeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputAppscopePqTypedDict(TypedDict):
 
 
 class InputAppscopePq(BaseModel):
-    mode: Annotated[
-        Optional[InputAppscopeMode], PlainValidator(validate_open_enum(False))
-    ] = InputAppscopeMode.ALWAYS
+    mode: Optional[InputAppscopeMode] = InputAppscopeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputAppscopePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputAppscopeCompression], PlainValidator(validate_open_enum(False))
-    ] = InputAppscopeCompression.NONE
+    compress: Optional[InputAppscopeCompression] = InputAppscopeCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -151,7 +144,7 @@ class InputAppscopeFilter(BaseModel):
     r"""To override the UNIX domain socket or address/port specified in General Settings (while leaving Authentication settings as is), enter a URL."""
 
 
-class InputAppscopeDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeDataCompressionFormat(str, Enum):
     NONE = "none"
     GZIP = "gzip"
 
@@ -183,10 +176,9 @@ class InputAppscopePersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""
 
-    compress: Annotated[
-        Optional[InputAppscopeDataCompressionFormat],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputAppscopeDataCompressionFormat.GZIP
+    compress: Optional[InputAppscopeDataCompressionFormat] = (
+        InputAppscopeDataCompressionFormat.GZIP
+    )
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/appscope"
@@ -194,21 +186,21 @@ class InputAppscopePersistence(BaseModel):
     r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/appscope"""
 
 
-class InputAppscopeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
     MANUAL = "manual"
     SECRET = "secret"
 
 
-class InputAppscopeMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputAppscopeMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -267,19 +259,11 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputAppscopeMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputAppscopeMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputAppscopeMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputAppscopeMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -426,11 +410,7 @@ class InputAppscope(BaseModel):
     persistence: Optional[InputAppscopePersistence] = None
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputAppscopeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputAppscopeAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputAppscopeAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
```
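The dominant change across these model files is the switch from Speakeasy-style open enums (classes declared with `metaclass=utils.OpenEnumMeta` and fields wrapped in `PlainValidator(validate_open_enum(False))`) to plain `str, Enum` fields. Below is a minimal standalone sketch of what that means for validation under pydantic v2; the `Compression`/`Pq` names are hypothetical stand-ins, not the generated SDK classes, which derive from `cribl_control_plane.types.BaseModel`.

```python
# Hypothetical stand-ins for the pattern above (e.g. InputAppscopeCompression
# on InputAppscopePq); a sketch, not the generated SDK code.
from enum import Enum
from typing import Optional

import pydantic


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


class Pq(pydantic.BaseModel):
    # Plain closed-enum field, mirroring the "+" side of the diff.
    compress: Optional[Compression] = Compression.NONE


print(Pq(compress="gzip").compress)  # Compression.GZIP: matching strings still coerce

try:
    Pq(compress="zstd")  # not an enum member
except pydantic.ValidationError:
    print("unknown value rejected")
```

If the removed open-enum validator previously let unrecognized string values pass through, callers relying on that behavior would now see a `ValidationError` instead; that is the practical difference to check when upgrading.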
cribl_control_plane/models/inputazureblob.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputAzureBlobConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputAzureBlobPqTypedDict(TypedDict):
 
 
 class InputAzureBlobPq(BaseModel):
-    mode: Annotated[
-        Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
-    ] = InputAzureBlobMode.ALWAYS
+    mode: Optional[InputAzureBlobMode] = InputAzureBlobMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputAzureBlobPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
-    ] = InputAzureBlobCompression.NONE
+    compress: Optional[InputAzureBlobCompression] = InputAzureBlobCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -116,7 +109,7 @@ class InputAzureBlobMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobAuthenticationMethod(str, Enum):
     MANUAL = "manual"
     SECRET = "secret"
     CLIENT_SECRET = "clientSecret"
@@ -277,11 +270,7 @@ class InputAzureBlob(BaseModel):
     r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputAzureBlobAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputAzureBlobAuthenticationMethod.MANUAL
 
     description: Optional[str] = None
```
cribl_control_plane/models/inputcollection.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputCollectionConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCollectionMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
    ALWAYS = "always"
 
 
-class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCollectionCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputCollectionPqTypedDict(TypedDict):
 
 
 class InputCollectionPq(BaseModel):
-    mode: Annotated[
-        Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
-    ] = InputCollectionMode.ALWAYS
+    mode: Optional[InputCollectionMode] = InputCollectionMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputCollectionPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
-    ] = InputCollectionCompression.NONE
+    compress: Optional[InputCollectionCompression] = InputCollectionCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
```
cribl_control_plane/models/inputconfluentcloud.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputConfluentCloudConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputConfluentCloudPqTypedDict(TypedDict):
 
 
 class InputConfluentCloudPq(BaseModel):
-    mode: Annotated[
-        Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
-    ] = InputConfluentCloudMode.ALWAYS
+    mode: Optional[InputConfluentCloudMode] = InputConfluentCloudMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,10 +88,9 @@ class InputConfluentCloudPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputConfluentCloudCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputConfluentCloudCompression.NONE
+    compress: Optional[InputConfluentCloudCompression] = (
+        InputConfluentCloudCompression.NONE
+    )
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -104,14 +98,14 @@ class InputConfluentCloudPq(BaseModel):
     ] = None
 
 
-class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -171,22 +165,23 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
 
+class InputConfluentCloudSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -206,18 +201,14 @@ class InputConfluentCloudAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -277,18 +268,12 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
@@ -297,6 +282,8 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -318,6 +305,11 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -337,7 +329,7 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudSASLMechanism(str, Enum):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -358,10 +350,9 @@ class InputConfluentCloudAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Annotated[
-        Optional[InputConfluentCloudSASLMechanism],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputConfluentCloudSASLMechanism.PLAIN
+    mechanism: Optional[InputConfluentCloudSASLMechanism] = (
+        InputConfluentCloudSASLMechanism.PLAIN
+    )
 
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
```
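Besides the enum cleanup, this file gains a closed `InputConfluentCloudSchemaType` enum (`avro`/`json`) and a `schema_type` field (wire name `schemaType`, default `AVRO`) on the schema-registry authentication model. The sketch below mirrors only that field shape in a standalone pydantic model to show how the camelCase alias and the default interact; it is illustrative, not the generated `InputConfluentCloudKafkaSchemaRegistryAuthentication` class.

```python
# Illustrative stand-in for the new schema_type field; not the SDK's generated class.
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class SchemaType(str, Enum):
    AVRO = "avro"
    JSON = "json"


class SchemaRegistrySettings(pydantic.BaseModel):
    schema_type: Annotated[
        Optional[SchemaType], pydantic.Field(alias="schemaType")
    ] = SchemaType.AVRO


# The camelCase wire name populates the snake_case attribute...
print(SchemaRegistrySettings.model_validate({"schemaType": "json"}).schema_type)
# ...and omitting it falls back to the AVRO default.
print(SchemaRegistrySettings().schema_type)
```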
cribl_control_plane/models/inputcribl.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputCriblConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputCriblMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputCriblCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputCriblPqTypedDict(TypedDict):
 
 
 class InputCriblPq(BaseModel):
-    mode: Annotated[
-        Optional[InputCriblMode], PlainValidator(validate_open_enum(False))
-    ] = InputCriblMode.ALWAYS
+    mode: Optional[InputCriblMode] = InputCriblMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputCriblPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputCriblCompression], PlainValidator(validate_open_enum(False))
-    ] = InputCriblCompression.NONE
+    compress: Optional[InputCriblCompression] = InputCriblCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
```
cribl_control_plane/models/inputcriblhttp.py

```diff
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputCriblHTTPConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputCriblHTTPMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblHTTPMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputCriblHTTPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblHTTPCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputCriblHTTPPqTypedDict(TypedDict):
 
 
 class InputCriblHTTPPq(BaseModel):
-    mode: Annotated[
-        Optional[InputCriblHTTPMode], PlainValidator(validate_open_enum(False))
-    ] = InputCriblHTTPMode.ALWAYS
+    mode: Optional[InputCriblHTTPMode] = InputCriblHTTPMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputCriblHTTPPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputCriblHTTPCompression], PlainValidator(validate_open_enum(False))
-    ] = InputCriblHTTPCompression.NONE
+    compress: Optional[InputCriblHTTPCompression] = InputCriblHTTPCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputCriblHTTPPq(BaseModel):
     ] = None
 
 
-class InputCriblHTTPMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblHTTPMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputCriblHTTPMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCriblHTTPMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,19 +162,11 @@ class InputCriblHTTPTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputCriblHTTPMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputCriblHTTPMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputCriblHTTPMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputCriblHTTPMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
```
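The TLS-version fields above show the other recurring simplification: the nested `Annotated[Annotated[...], pydantic.Field(alias=...)]` wrapper that carried the open-enum validator collapses to a single `Annotated[Optional[<enum>], pydantic.Field(alias=...)]`. A rough standalone equivalent (hypothetical `TLSSettings` model, assuming pydantic v2) confirming that the camelCase alias still round-trips:

```python
# Rough equivalent of the flattened minVersion field above; hypothetical model,
# not the generated InputCriblHTTPTLSSettingsServerSide class.
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class MinimumTLSVersion(str, Enum):
    TL_SV1_2 = "TLSv1.2"
    TL_SV1_3 = "TLSv1.3"


class TLSSettings(pydantic.BaseModel):
    min_version: Annotated[
        Optional[MinimumTLSVersion], pydantic.Field(alias="minVersion")
    ] = None


tls = TLSSettings.model_validate({"minVersion": "TLSv1.3"})
print(tls.min_version)                             # MinimumTLSVersion.TL_SV1_3
print(tls.model_dump(mode="json", by_alias=True))  # {'minVersion': 'TLSv1.3'}
```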