cribl-control-plane: 0.0.49 → 0.1.0b1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +4 -6
- cribl_control_plane/errors/healthstatus_error.py +8 -2
- cribl_control_plane/health.py +6 -2
- cribl_control_plane/models/__init__.py +68 -30
- cribl_control_plane/models/cacheconnection.py +10 -2
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
- cribl_control_plane/models/cloudprovider.py +2 -1
- cribl_control_plane/models/configgroup.py +7 -2
- cribl_control_plane/models/configgroupcloud.py +6 -2
- cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
- cribl_control_plane/models/createinputhectokenbyidop.py +6 -5
- cribl_control_plane/models/createversionpushop.py +5 -5
- cribl_control_plane/models/cribllakedataset.py +8 -2
- cribl_control_plane/models/datasetmetadata.py +8 -2
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
- cribl_control_plane/models/error.py +16 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
- cribl_control_plane/models/gethealthinfoop.py +17 -0
- cribl_control_plane/models/getsummaryop.py +7 -2
- cribl_control_plane/models/getversionshowop.py +6 -5
- cribl_control_plane/models/gitshowresult.py +19 -0
- cribl_control_plane/models/hbcriblinfo.py +24 -3
- cribl_control_plane/models/healthstatus.py +7 -4
- cribl_control_plane/models/heartbeatmetadata.py +3 -0
- cribl_control_plane/models/inputappscope.py +34 -14
- cribl_control_plane/models/inputazureblob.py +17 -6
- cribl_control_plane/models/inputcollection.py +11 -4
- cribl_control_plane/models/inputconfluentcloud.py +41 -32
- cribl_control_plane/models/inputcribl.py +11 -4
- cribl_control_plane/models/inputcriblhttp.py +23 -8
- cribl_control_plane/models/inputcribllakehttp.py +22 -10
- cribl_control_plane/models/inputcriblmetrics.py +12 -4
- cribl_control_plane/models/inputcribltcp.py +23 -8
- cribl_control_plane/models/inputcrowdstrike.py +26 -10
- cribl_control_plane/models/inputdatadogagent.py +24 -8
- cribl_control_plane/models/inputdatagen.py +11 -4
- cribl_control_plane/models/inputedgeprometheus.py +58 -24
- cribl_control_plane/models/inputelastic.py +40 -14
- cribl_control_plane/models/inputeventhub.py +15 -6
- cribl_control_plane/models/inputexec.py +14 -6
- cribl_control_plane/models/inputfile.py +15 -6
- cribl_control_plane/models/inputfirehose.py +23 -8
- cribl_control_plane/models/inputgooglepubsub.py +19 -6
- cribl_control_plane/models/inputgrafana.py +67 -24
- cribl_control_plane/models/inputhttp.py +23 -8
- cribl_control_plane/models/inputhttpraw.py +23 -8
- cribl_control_plane/models/inputjournalfiles.py +12 -4
- cribl_control_plane/models/inputkafka.py +41 -28
- cribl_control_plane/models/inputkinesis.py +38 -14
- cribl_control_plane/models/inputkubeevents.py +11 -4
- cribl_control_plane/models/inputkubelogs.py +16 -8
- cribl_control_plane/models/inputkubemetrics.py +16 -8
- cribl_control_plane/models/inputloki.py +29 -10
- cribl_control_plane/models/inputmetrics.py +23 -8
- cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
- cribl_control_plane/models/inputmsk.py +48 -30
- cribl_control_plane/models/inputnetflow.py +11 -4
- cribl_control_plane/models/inputoffice365mgmt.py +33 -14
- cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
- cribl_control_plane/models/inputoffice365service.py +35 -16
- cribl_control_plane/models/inputopentelemetry.py +38 -16
- cribl_control_plane/models/inputprometheus.py +50 -18
- cribl_control_plane/models/inputprometheusrw.py +30 -10
- cribl_control_plane/models/inputrawudp.py +11 -4
- cribl_control_plane/models/inputs3.py +21 -8
- cribl_control_plane/models/inputs3inventory.py +26 -10
- cribl_control_plane/models/inputsecuritylake.py +27 -10
- cribl_control_plane/models/inputsnmp.py +16 -6
- cribl_control_plane/models/inputsplunk.py +33 -12
- cribl_control_plane/models/inputsplunkhec.py +29 -10
- cribl_control_plane/models/inputsplunksearch.py +33 -14
- cribl_control_plane/models/inputsqs.py +27 -10
- cribl_control_plane/models/inputsyslog.py +43 -16
- cribl_control_plane/models/inputsystemmetrics.py +48 -24
- cribl_control_plane/models/inputsystemstate.py +16 -8
- cribl_control_plane/models/inputtcp.py +29 -10
- cribl_control_plane/models/inputtcpjson.py +29 -10
- cribl_control_plane/models/inputwef.py +37 -14
- cribl_control_plane/models/inputwindowsmetrics.py +44 -24
- cribl_control_plane/models/inputwineventlogs.py +20 -10
- cribl_control_plane/models/inputwiz.py +21 -8
- cribl_control_plane/models/inputwizwebhook.py +23 -8
- cribl_control_plane/models/inputzscalerhec.py +29 -10
- cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
- cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
- cribl_control_plane/models/masterworkerentry.py +7 -2
- cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
- cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeprovidedinfo.py +3 -0
- cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeupgradestate.py +2 -1
- cribl_control_plane/models/nodeupgradestatus.py +13 -5
- cribl_control_plane/models/output.py +3 -0
- cribl_control_plane/models/outputazureblob.py +48 -18
- cribl_control_plane/models/outputazuredataexplorer.py +73 -28
- cribl_control_plane/models/outputazureeventhub.py +40 -18
- cribl_control_plane/models/outputazurelogs.py +35 -12
- cribl_control_plane/models/outputclickhouse.py +55 -20
- cribl_control_plane/models/outputcloudwatch.py +29 -10
- cribl_control_plane/models/outputconfluentcloud.py +71 -44
- cribl_control_plane/models/outputcriblhttp.py +44 -16
- cribl_control_plane/models/outputcribllake.py +46 -16
- cribl_control_plane/models/outputcribltcp.py +45 -18
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
- cribl_control_plane/models/outputdatabricks.py +439 -0
- cribl_control_plane/models/outputdatadog.py +48 -20
- cribl_control_plane/models/outputdataset.py +46 -18
- cribl_control_plane/models/outputdiskspool.py +7 -2
- cribl_control_plane/models/outputdls3.py +68 -24
- cribl_control_plane/models/outputdynatracehttp.py +53 -20
- cribl_control_plane/models/outputdynatraceotlp.py +55 -22
- cribl_control_plane/models/outputelastic.py +43 -18
- cribl_control_plane/models/outputelasticcloud.py +36 -12
- cribl_control_plane/models/outputexabeam.py +29 -10
- cribl_control_plane/models/outputfilesystem.py +39 -14
- cribl_control_plane/models/outputgooglechronicle.py +50 -16
- cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
- cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
- cribl_control_plane/models/outputgooglepubsub.py +31 -10
- cribl_control_plane/models/outputgrafanacloud.py +97 -32
- cribl_control_plane/models/outputgraphite.py +31 -14
- cribl_control_plane/models/outputhoneycomb.py +35 -12
- cribl_control_plane/models/outputhumiohec.py +43 -16
- cribl_control_plane/models/outputinfluxdb.py +42 -16
- cribl_control_plane/models/outputkafka.py +69 -40
- cribl_control_plane/models/outputkinesis.py +40 -16
- cribl_control_plane/models/outputloki.py +41 -16
- cribl_control_plane/models/outputminio.py +65 -24
- cribl_control_plane/models/outputmsk.py +77 -42
- cribl_control_plane/models/outputnewrelic.py +43 -18
- cribl_control_plane/models/outputnewrelicevents.py +41 -14
- cribl_control_plane/models/outputopentelemetry.py +67 -26
- cribl_control_plane/models/outputprometheus.py +35 -12
- cribl_control_plane/models/outputring.py +19 -8
- cribl_control_plane/models/outputs3.py +68 -26
- cribl_control_plane/models/outputsecuritylake.py +52 -18
- cribl_control_plane/models/outputsentinel.py +45 -18
- cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
- cribl_control_plane/models/outputservicenow.py +60 -24
- cribl_control_plane/models/outputsignalfx.py +37 -14
- cribl_control_plane/models/outputsns.py +36 -14
- cribl_control_plane/models/outputsplunk.py +60 -24
- cribl_control_plane/models/outputsplunkhec.py +35 -12
- cribl_control_plane/models/outputsplunklb.py +77 -30
- cribl_control_plane/models/outputsqs.py +41 -16
- cribl_control_plane/models/outputstatsd.py +30 -14
- cribl_control_plane/models/outputstatsdext.py +29 -12
- cribl_control_plane/models/outputsumologic.py +35 -12
- cribl_control_plane/models/outputsyslog.py +58 -24
- cribl_control_plane/models/outputtcpjson.py +52 -20
- cribl_control_plane/models/outputwavefront.py +35 -12
- cribl_control_plane/models/outputwebhook.py +58 -22
- cribl_control_plane/models/outputxsiam.py +35 -14
- cribl_control_plane/models/productscore.py +2 -1
- cribl_control_plane/models/rbacresource.py +2 -1
- cribl_control_plane/models/resourcepolicy.py +4 -2
- cribl_control_plane/models/routeconf.py +3 -4
- cribl_control_plane/models/runnablejobcollection.py +30 -13
- cribl_control_plane/models/runnablejobexecutor.py +13 -4
- cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +6 -5
- cribl_control_plane/models/workertypes.py +2 -1
- {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0b1.dist-info}/METADATA +1 -1
- cribl_control_plane-0.1.0b1.dist-info/RECORD +327 -0
- cribl_control_plane/models/appmode.py +0 -13
- cribl_control_plane/models/routecloneconf.py +0 -13
- cribl_control_plane-0.0.49.dist-info/RECORD +0 -325
- {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0b1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputkubelogs.py (+16 -8)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputKubeLogsConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputKubeLogsMode(str, Enum):
+class InputKubeLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputKubeLogsPqCompression(str, Enum):
+class InputKubeLogsPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputKubeLogsPqTypedDict(TypedDict):


 class InputKubeLogsPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputKubeLogsMode], PlainValidator(validate_open_enum(False))
+    ] = InputKubeLogsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputKubeLogsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
+    compress: Annotated[
+        Optional[InputKubeLogsPqCompression], PlainValidator(validate_open_enum(False))
+    ] = InputKubeLogsPqCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -124,7 +131,7 @@ class InputKubeLogsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputKubeLogsPersistenceCompression(str, Enum):
+class InputKubeLogsPersistenceCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data compression format. Default is gzip."""

     NONE = "none"
@@ -157,9 +164,10 @@ class InputKubeLogsDiskSpooling(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""

-    compress:
-        InputKubeLogsPersistenceCompression
-
+    compress: Annotated[
+        Optional[InputKubeLogsPersistenceCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputKubeLogsPersistenceCompression.GZIP
     r"""Data compression format. Default is gzip."""

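The recurring change in this release, visible in the hunks above, is that the generated enums now use utils.OpenEnumMeta and enum-typed fields gain a PlainValidator(validate_open_enum(False)) annotation, so values outside the generated member list should no longer be rejected at parse time. A minimal consumer-side sketch, assuming these models behave like typical Speakeasy open enums (unknown values pass through as plain strings) and that the remaining PQ fields keep their defaults:

from cribl_control_plane.models.inputkubelogs import (
    InputKubeLogsPq,
    InputKubeLogsPqCompression,
)

# "zstd" is a hypothetical codec that is not a member of InputKubeLogsPqCompression.
pq = InputKubeLogsPq.model_validate({"compress": "zstd"})

if isinstance(pq.compress, InputKubeLogsPqCompression):
    print("known codec:", pq.compress.value)
else:
    # Open enums keep unrecognized values, so handle the raw string explicitly.
    print("unrecognized codec:", pq.compress)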
cribl_control_plane/models/inputkubemetrics.py (+16 -8)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputKubeMetricsConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputKubeMetricsMode(str, Enum):
+class InputKubeMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputKubeMetricsCompression(str, Enum):
+class InputKubeMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputKubeMetricsPqTypedDict(TypedDict):


 class InputKubeMetricsPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputKubeMetricsMode], PlainValidator(validate_open_enum(False))
+    ] = InputKubeMetricsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputKubeMetricsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
+    compress: Annotated[
+        Optional[InputKubeMetricsCompression], PlainValidator(validate_open_enum(False))
+    ] = InputKubeMetricsCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -124,7 +131,7 @@ class InputKubeMetricsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputKubeMetricsDataCompressionFormat(str, Enum):
+class InputKubeMetricsDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     NONE = "none"
     GZIP = "gzip"

@@ -156,9 +163,10 @@ class InputKubeMetricsPersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""

-    compress:
-        InputKubeMetricsDataCompressionFormat
-
+    compress: Annotated[
+        Optional[InputKubeMetricsDataCompressionFormat],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputKubeMetricsDataCompressionFormat.GZIP

     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/kube_metrics"
cribl_control_plane/models/inputloki.py (+29 -10)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputLokiConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputLokiMode(str, Enum):
+class InputLokiMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputLokiCompression(str, Enum):
+class InputLokiCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputLokiPqTypedDict(TypedDict):


 class InputLokiPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputLokiMode], PlainValidator(validate_open_enum(False))
+    ] = InputLokiMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputLokiPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
+    compress: Annotated[
+        Optional[InputLokiCompression], PlainValidator(validate_open_enum(False))
+    ] = InputLokiCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -96,14 +103,14 @@ class InputLokiPq(BaseModel):
     ] = None


-class InputLokiMinimumTLSVersion(str, Enum):
+class InputLokiMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputLokiMaximumTLSVersion(str, Enum):
+class InputLokiMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -162,15 +169,23 @@ class InputLokiTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-
+        Annotated[
+            Optional[InputLokiMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-
+        Annotated[
+            Optional[InputLokiMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None


-class InputLokiAuthenticationType(str, Enum):
+class InputLokiAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Loki logs authentication type"""

     NONE = "none"
@@ -401,7 +416,11 @@ class InputLoki(BaseModel):
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'."""

     auth_type: Annotated[
-
+        Annotated[
+            Optional[InputLokiAuthenticationType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
     ] = InputLokiAuthenticationType.NONE
     r"""Loki logs authentication type"""

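In InputLoki, auth_type and the TLS version fields keep their camelCase wire aliases (authType, minVersion, maxVersion) while gaining the open-enum validator, so a payload carrying a value the SDK does not know yet should now parse. A hedged sketch using the server-side TLS settings model, whose fields shown above are all optional; "TLSv1.4" is a made-up future value:

from cribl_control_plane.models.inputloki import (
    InputLokiMaximumTLSVersion,
    InputLokiTLSSettingsServerSide,
)

# Populate by wire alias, the same shape the REST API would send.
tls = InputLokiTLSSettingsServerSide.model_validate(
    {"minVersion": "TLSv1.2", "maxVersion": "TLSv1.4"}
)

print(tls.min_version)  # expected to resolve to the known enum member
print(tls.max_version)  # expected to survive as the raw string "TLSv1.4"
print(isinstance(tls.max_version, InputLokiMaximumTLSVersion))  # False for unknown values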
cribl_control_plane/models/inputmetrics.py (+23 -8)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputMetricsConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputMetricsMode(str, Enum):
+class InputMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputMetricsCompression(str, Enum):
+class InputMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputMetricsPqTypedDict(TypedDict):


 class InputMetricsPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputMetricsMode], PlainValidator(validate_open_enum(False))
+    ] = InputMetricsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputMetricsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
+    compress: Annotated[
+        Optional[InputMetricsCompression], PlainValidator(validate_open_enum(False))
+    ] = InputMetricsCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -96,14 +103,14 @@ class InputMetricsPq(BaseModel):
     ] = None


-class InputMetricsMinimumTLSVersion(str, Enum):
+class InputMetricsMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputMetricsMaximumTLSVersion(str, Enum):
+class InputMetricsMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -162,11 +169,19 @@ class InputMetricsTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-
+        Annotated[
+            Optional[InputMetricsMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-
+        Annotated[
+            Optional[InputMetricsMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None

cribl_control_plane/models/inputmodeldriventelemetry.py (+32 -10)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputModelDrivenTelemetryConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputModelDrivenTelemetryMode(str, Enum):
+class InputModelDrivenTelemetryMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputModelDrivenTelemetryCompression(str, Enum):
+class InputModelDrivenTelemetryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,10 @@ class InputModelDrivenTelemetryPqTypedDict(TypedDict):


 class InputModelDrivenTelemetryPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputModelDrivenTelemetryMode],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputModelDrivenTelemetryMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,9 +94,10 @@ class InputModelDrivenTelemetryPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
-        InputModelDrivenTelemetryCompression
-
+    compress: Annotated[
+        Optional[InputModelDrivenTelemetryCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputModelDrivenTelemetryCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -99,14 +106,18 @@ class InputModelDrivenTelemetryPq(BaseModel):
     ] = None


-class InputModelDrivenTelemetryMinimumTLSVersion(
+class InputModelDrivenTelemetryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputModelDrivenTelemetryMaximumTLSVersion(
+class InputModelDrivenTelemetryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -119,6 +130,8 @@ class InputModelDrivenTelemetryTLSSettingsServerSideTypedDict(TypedDict):
     r"""The name of the predefined certificate"""
     priv_key_path: NotRequired[str]
     r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
     cert_path: NotRequired[str]
     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
     ca_path: NotRequired[str]
@@ -142,6 +155,9 @@ class InputModelDrivenTelemetryTLSSettingsServerSide(BaseModel):
     priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
     r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""

+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
     cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""

@@ -160,12 +176,18 @@ class InputModelDrivenTelemetryTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-
+        Annotated[
+            Optional[InputModelDrivenTelemetryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-
+        Annotated[
+            Optional[InputModelDrivenTelemetryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None

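Besides the open-enum conversion, the model-driven telemetry Source adds a passphrase field (and matching TypedDict key) next to privKeyPath in the server-side TLS settings. A small sketch of supplying it, assuming the other fields shown above keep their defaults; the key and certificate paths are illustrative only:

from cribl_control_plane.models.inputmodeldriventelemetry import (
    InputModelDrivenTelemetryTLSSettingsServerSide,
)

tls = InputModelDrivenTelemetryTLSSettingsServerSide.model_validate(
    {
        "privKeyPath": "$CRIBL_HOME/local/certs/server.key",  # illustrative path
        "certPath": "$CRIBL_HOME/local/certs/server.crt",  # illustrative path
        "passphrase": "example-passphrase",  # new in 0.1.0b1
    }
)
print(tls.passphrase)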
cribl_control_plane/models/inputmsk.py (+48 -30)

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputMskConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputMskMode(str, Enum):
+class InputMskMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputMskCompression(str, Enum):
+class InputMskCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputMskPqTypedDict(TypedDict):


 class InputMskPq(BaseModel):
-    mode:
+    mode: Annotated[
+        Optional[InputMskMode], PlainValidator(validate_open_enum(False))
+    ] = InputMskMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputMskPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress:
+    compress: Annotated[
+        Optional[InputMskCompression], PlainValidator(validate_open_enum(False))
+    ] = InputMskCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -109,13 +116,6 @@ class InputMskMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputMskSchemaType(str, Enum):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -135,14 +135,18 @@ class InputMskAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""


-class InputMskKafkaSchemaRegistryMinimumTLSVersion(
+class InputMskKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputMskKafkaSchemaRegistryMaximumTLSVersion(
+class InputMskKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -202,12 +206,18 @@ class InputMskKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-
+        Annotated[
+            Optional[InputMskKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-
+        Annotated[
+            Optional[InputMskKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None

@@ -216,8 +226,6 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputMskSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -237,11 +245,6 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Optional[InputMskSchemaType], pydantic.Field(alias="schemaType")
-    ] = InputMskSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -261,7 +264,7 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputMskKafkaSchemaRegistryTLSSettingsClientSide] = None


-class InputMskAuthenticationMethod(str, Enum):
+class InputMskAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -269,21 +272,21 @@ class InputMskAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class InputMskSignatureVersion(str, Enum):
+class InputMskSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing MSK cluster requests"""

     V2 = "v2"
     V4 = "v4"


-class InputMskMinimumTLSVersion(str, Enum):
+class InputMskMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputMskMaximumTLSVersion(str, Enum):
+class InputMskMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -343,11 +346,19 @@ class InputMskTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-
+        Annotated[
+            Optional[InputMskMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-
+        Annotated[
+            Optional[InputMskMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None

@@ -569,7 +580,10 @@ class InputMsk(BaseModel):
     r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""

     aws_authentication_method: Annotated[
-
+        Annotated[
+            Optional[InputMskAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputMskAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -582,7 +596,11 @@ class InputMsk(BaseModel):
     r"""MSK cluster service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to MSK cluster-compatible endpoint."""

     signature_version: Annotated[
-
+        Annotated[
+            Optional[InputMskSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="signatureVersion"),
     ] = InputMskSignatureVersion.V4
     r"""Signature version to use for signing MSK cluster requests"""

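Beyond the open-enum changes, the MSK Source removes the InputMskSchemaType enum and the schema_type / schemaType field from the Schema Registry authentication model. A hedged migration sketch, assuming the remaining fields keep the defaults shown in the hunks above:

from cribl_control_plane.models.inputmsk import InputMskKafkaSchemaRegistryAuthentication

# 0.0.49 exposed auth.schema_type (wire alias "schemaType", default InputMskSchemaType.AVRO).
# 0.1.0b1 removes both the field and the enum, so stop setting it and rely on the
# remaining Schema Registry options only.
auth = InputMskKafkaSchemaRegistryAuthentication(disabled=False)
print(auth.schema_registry_url)  # default remains "http://localhost:8081"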