cribl-control-plane 0.0.50rc2__py3-none-any.whl → 0.0.51__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +5 -3
- cribl_control_plane/errors/healthstatus_error.py +2 -8
- cribl_control_plane/groups_sdk.py +4 -4
- cribl_control_plane/health.py +2 -6
- cribl_control_plane/models/__init__.py +31 -56
- cribl_control_plane/models/appmode.py +13 -0
- cribl_control_plane/models/cacheconnection.py +2 -10
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
- cribl_control_plane/models/cloudprovider.py +1 -2
- cribl_control_plane/models/configgroup.py +4 -24
- cribl_control_plane/models/configgroupcloud.py +2 -6
- cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
- cribl_control_plane/models/createinputhectokenbyidop.py +5 -6
- cribl_control_plane/models/createversionpushop.py +5 -5
- cribl_control_plane/models/cribllakedataset.py +2 -8
- cribl_control_plane/models/datasetmetadata.py +2 -8
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
- cribl_control_plane/models/getsummaryop.py +2 -7
- cribl_control_plane/models/getversionshowop.py +5 -6
- cribl_control_plane/models/gitinfo.py +3 -14
- cribl_control_plane/models/hbcriblinfo.py +3 -24
- cribl_control_plane/models/healthstatus.py +4 -7
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/input.py +63 -65
- cribl_control_plane/models/inputappscope.py +14 -34
- cribl_control_plane/models/inputazureblob.py +6 -17
- cribl_control_plane/models/inputcollection.py +4 -11
- cribl_control_plane/models/inputconfluentcloud.py +32 -41
- cribl_control_plane/models/inputcribl.py +4 -11
- cribl_control_plane/models/inputcriblhttp.py +8 -23
- cribl_control_plane/models/inputcribllakehttp.py +10 -22
- cribl_control_plane/models/inputcriblmetrics.py +4 -12
- cribl_control_plane/models/inputcribltcp.py +8 -23
- cribl_control_plane/models/inputcrowdstrike.py +10 -26
- cribl_control_plane/models/inputdatadogagent.py +8 -24
- cribl_control_plane/models/inputdatagen.py +4 -11
- cribl_control_plane/models/inputedgeprometheus.py +24 -58
- cribl_control_plane/models/inputelastic.py +14 -40
- cribl_control_plane/models/inputeventhub.py +6 -15
- cribl_control_plane/models/inputexec.py +6 -14
- cribl_control_plane/models/inputfile.py +6 -15
- cribl_control_plane/models/inputfirehose.py +8 -23
- cribl_control_plane/models/inputgooglepubsub.py +6 -19
- cribl_control_plane/models/inputgrafana.py +24 -67
- cribl_control_plane/models/inputhttp.py +8 -23
- cribl_control_plane/models/inputhttpraw.py +8 -23
- cribl_control_plane/models/inputjournalfiles.py +4 -12
- cribl_control_plane/models/inputkafka.py +28 -41
- cribl_control_plane/models/inputkinesis.py +14 -38
- cribl_control_plane/models/inputkubeevents.py +4 -11
- cribl_control_plane/models/inputkubelogs.py +8 -16
- cribl_control_plane/models/inputkubemetrics.py +8 -16
- cribl_control_plane/models/inputloki.py +10 -29
- cribl_control_plane/models/inputmetrics.py +8 -23
- cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
- cribl_control_plane/models/inputmsk.py +30 -48
- cribl_control_plane/models/inputnetflow.py +4 -11
- cribl_control_plane/models/inputoffice365mgmt.py +14 -33
- cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
- cribl_control_plane/models/inputoffice365service.py +16 -35
- cribl_control_plane/models/inputopentelemetry.py +16 -38
- cribl_control_plane/models/inputprometheus.py +18 -50
- cribl_control_plane/models/inputprometheusrw.py +10 -30
- cribl_control_plane/models/inputrawudp.py +4 -11
- cribl_control_plane/models/inputs3.py +8 -21
- cribl_control_plane/models/inputs3inventory.py +10 -26
- cribl_control_plane/models/inputsecuritylake.py +10 -27
- cribl_control_plane/models/inputsnmp.py +6 -16
- cribl_control_plane/models/inputsplunk.py +12 -33
- cribl_control_plane/models/inputsplunkhec.py +10 -29
- cribl_control_plane/models/inputsplunksearch.py +14 -33
- cribl_control_plane/models/inputsqs.py +10 -27
- cribl_control_plane/models/inputsyslog.py +16 -43
- cribl_control_plane/models/inputsystemmetrics.py +24 -48
- cribl_control_plane/models/inputsystemstate.py +8 -16
- cribl_control_plane/models/inputtcp.py +10 -29
- cribl_control_plane/models/inputtcpjson.py +10 -29
- cribl_control_plane/models/inputwef.py +14 -37
- cribl_control_plane/models/inputwindowsmetrics.py +24 -44
- cribl_control_plane/models/inputwineventlogs.py +10 -20
- cribl_control_plane/models/inputwiz.py +8 -21
- cribl_control_plane/models/inputwizwebhook.py +8 -23
- cribl_control_plane/models/inputzscalerhec.py +10 -29
- cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
- cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
- cribl_control_plane/models/masterworkerentry.py +2 -7
- cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
- cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
- cribl_control_plane/models/nodeprovidedinfo.py +0 -3
- cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
- cribl_control_plane/models/nodeupgradestate.py +1 -2
- cribl_control_plane/models/nodeupgradestatus.py +5 -13
- cribl_control_plane/models/output.py +79 -84
- cribl_control_plane/models/outputazureblob.py +18 -48
- cribl_control_plane/models/outputazuredataexplorer.py +28 -73
- cribl_control_plane/models/outputazureeventhub.py +18 -40
- cribl_control_plane/models/outputazurelogs.py +12 -35
- cribl_control_plane/models/outputclickhouse.py +20 -55
- cribl_control_plane/models/outputcloudwatch.py +10 -29
- cribl_control_plane/models/outputconfluentcloud.py +44 -71
- cribl_control_plane/models/outputcriblhttp.py +16 -44
- cribl_control_plane/models/outputcribllake.py +16 -46
- cribl_control_plane/models/outputcribltcp.py +18 -45
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
- cribl_control_plane/models/outputdatadog.py +20 -48
- cribl_control_plane/models/outputdataset.py +18 -46
- cribl_control_plane/models/outputdiskspool.py +2 -7
- cribl_control_plane/models/outputdls3.py +24 -68
- cribl_control_plane/models/outputdynatracehttp.py +20 -53
- cribl_control_plane/models/outputdynatraceotlp.py +22 -55
- cribl_control_plane/models/outputelastic.py +18 -43
- cribl_control_plane/models/outputelasticcloud.py +12 -36
- cribl_control_plane/models/outputexabeam.py +10 -29
- cribl_control_plane/models/outputfilesystem.py +14 -39
- cribl_control_plane/models/outputgooglechronicle.py +16 -50
- cribl_control_plane/models/outputgooglecloudlogging.py +18 -50
- cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
- cribl_control_plane/models/outputgooglepubsub.py +10 -31
- cribl_control_plane/models/outputgrafanacloud.py +32 -97
- cribl_control_plane/models/outputgraphite.py +14 -31
- cribl_control_plane/models/outputhoneycomb.py +12 -35
- cribl_control_plane/models/outputhumiohec.py +16 -43
- cribl_control_plane/models/outputinfluxdb.py +16 -42
- cribl_control_plane/models/outputkafka.py +40 -69
- cribl_control_plane/models/outputkinesis.py +16 -40
- cribl_control_plane/models/outputloki.py +16 -41
- cribl_control_plane/models/outputminio.py +24 -65
- cribl_control_plane/models/outputmsk.py +42 -77
- cribl_control_plane/models/outputnewrelic.py +18 -43
- cribl_control_plane/models/outputnewrelicevents.py +14 -41
- cribl_control_plane/models/outputopentelemetry.py +26 -67
- cribl_control_plane/models/outputprometheus.py +12 -35
- cribl_control_plane/models/outputring.py +8 -19
- cribl_control_plane/models/outputs3.py +26 -68
- cribl_control_plane/models/outputsecuritylake.py +18 -52
- cribl_control_plane/models/outputsentinel.py +18 -45
- cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
- cribl_control_plane/models/outputservicenow.py +24 -60
- cribl_control_plane/models/outputsignalfx.py +14 -37
- cribl_control_plane/models/outputsns.py +14 -36
- cribl_control_plane/models/outputsplunk.py +24 -60
- cribl_control_plane/models/outputsplunkhec.py +12 -35
- cribl_control_plane/models/outputsplunklb.py +30 -77
- cribl_control_plane/models/outputsqs.py +16 -41
- cribl_control_plane/models/outputstatsd.py +14 -30
- cribl_control_plane/models/outputstatsdext.py +12 -29
- cribl_control_plane/models/outputsumologic.py +12 -35
- cribl_control_plane/models/outputsyslog.py +24 -58
- cribl_control_plane/models/outputtcpjson.py +20 -52
- cribl_control_plane/models/outputwavefront.py +12 -35
- cribl_control_plane/models/outputwebhook.py +22 -58
- cribl_control_plane/models/outputxsiam.py +14 -35
- cribl_control_plane/models/productscore.py +1 -2
- cribl_control_plane/models/rbacresource.py +1 -2
- cribl_control_plane/models/resourcepolicy.py +2 -4
- cribl_control_plane/models/routecloneconf.py +13 -0
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/runnablejobcollection.py +13 -30
- cribl_control_plane/models/runnablejobexecutor.py +4 -13
- cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +5 -6
- cribl_control_plane/models/workertypes.py +1 -2
- {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.51.dist-info}/METADATA +14 -5
- cribl_control_plane-0.0.51.dist-info/RECORD +325 -0
- cribl_control_plane/models/error.py +0 -16
- cribl_control_plane/models/gethealthinfoop.py +0 -17
- cribl_control_plane/models/gitshowresult.py +0 -19
- cribl_control_plane/models/outputdatabricks.py +0 -282
- cribl_control_plane-0.0.50rc2.dist-info/RECORD +0 -327
- {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.51.dist-info}/WHEEL +0 -0

cribl_control_plane/models/inputedgeprometheus.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputEdgePrometheusConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputEdgePrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputEdgePrometheusPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputEdgePrometheusPqTypedDict(TypedDict):
 
 
 class InputEdgePrometheusPq(BaseModel):
-    mode: Annotated[
-        Optional[InputEdgePrometheusMode], PlainValidator(validate_open_enum(False))
-    ] = InputEdgePrometheusMode.ALWAYS
+    mode: Optional[InputEdgePrometheusMode] = InputEdgePrometheusMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,10 +88,9 @@ class InputEdgePrometheusPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputEdgePrometheusPqCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputEdgePrometheusPqCompression.NONE
+    compress: Optional[InputEdgePrometheusPqCompression] = (
+        InputEdgePrometheusPqCompression.NONE
+    )
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -104,7 +98,7 @@ class InputEdgePrometheusPq(BaseModel):
     ] = None
 
 
-class InputEdgePrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusDiscoveryType(str, Enum):
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""
 
     STATIC = "static"
@@ -114,9 +108,7 @@ class InputEdgePrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
     K8S_PODS = "k8s-pods"
 
 
-class InputEdgePrometheusPersistenceCompression(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusPersistenceCompression(str, Enum):
     r"""Data compression format. Default is gzip."""
 
     NONE = "none"
@@ -149,10 +141,9 @@ class InputEdgePrometheusDiskSpooling(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""
 
-    compress: Annotated[
-        Optional[InputEdgePrometheusPersistenceCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputEdgePrometheusPersistenceCompression.GZIP
+    compress: Optional[InputEdgePrometheusPersistenceCompression] = (
+        InputEdgePrometheusPersistenceCompression.GZIP
+    )
     r"""Data compression format. Default is gzip."""
 
 
@@ -169,9 +160,7 @@ class InputEdgePrometheusMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputEdgePrometheusAuthTypeAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusAuthTypeAuthenticationMethod(str, Enum):
     r"""Enter credentials directly, or select a stored secret"""
 
     MANUAL = "manual"
@@ -179,7 +168,7 @@ class InputEdgePrometheusAuthTypeAuthenticationMethod(
     KUBERNETES = "kubernetes"
 
 
-class TargetProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class TargetProtocol(str, Enum):
     r"""Protocol to use when collecting metrics"""
 
     HTTP = "http"
@@ -201,9 +190,7 @@ class Target(BaseModel):
     host: str
     r"""Name of host from which to pull metrics."""
 
-    protocol: Annotated[
-        Optional[TargetProtocol], PlainValidator(validate_open_enum(False))
-    ] = TargetProtocol.HTTP
+    protocol: Optional[TargetProtocol] = TargetProtocol.HTTP
     r"""Protocol to use when collecting metrics"""
 
     port: Optional[float] = 9090
@@ -213,7 +200,7 @@ class Target(BaseModel):
     r"""Path to use when collecting metrics from discovered targets"""
 
 
-class InputEdgePrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusRecordType(str, Enum):
     r"""DNS Record type to resolve"""
 
     SRV = "SRV"
@@ -221,7 +208,7 @@ class InputEdgePrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
     AAAA = "AAAA"
 
 
-class ScrapeProtocolProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class ScrapeProtocolProtocol(str, Enum):
     r"""Protocol to use when collecting metrics"""
 
     HTTP = "http"
@@ -243,9 +230,7 @@ class InputEdgePrometheusSearchFilter(BaseModel):
     r"""Search Filter Values, if empty only \"running\" EC2 instances will be returned"""
 
 
-class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -253,7 +238,7 @@ class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(
     SECRET = "secret"
 
 
-class InputEdgePrometheusSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusSignatureVersion(str, Enum):
     r"""Signature version to use for signing EC2 requests"""
 
     V2 = "v2"
@@ -401,10 +386,7 @@ class InputEdgePrometheus(BaseModel):
     r"""Other dimensions to include in events"""
 
     discovery_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusDiscoveryType],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusDiscoveryType],
         pydantic.Field(alias="discoveryType"),
     ] = InputEdgePrometheusDiscoveryType.STATIC
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""
@@ -421,10 +403,7 @@ class InputEdgePrometheus(BaseModel):
     r"""Fields to add to events from this input"""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusAuthTypeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusAuthTypeAuthenticationMethod],
        pydantic.Field(alias="authType"),
     ] = InputEdgePrometheusAuthTypeAuthenticationMethod.MANUAL
     r"""Enter credentials directly, or select a stored secret"""
@@ -437,19 +416,12 @@ class InputEdgePrometheus(BaseModel):
     r"""List of DNS names to resolve"""
 
     record_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusRecordType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="recordType"),
+        Optional[InputEdgePrometheusRecordType], pydantic.Field(alias="recordType")
     ] = InputEdgePrometheusRecordType.SRV
     r"""DNS Record type to resolve"""
 
     scrape_protocol: Annotated[
-        Annotated[
-            Optional[ScrapeProtocolProtocol], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="scrapeProtocol"),
+        Optional[ScrapeProtocolProtocol], pydantic.Field(alias="scrapeProtocol")
     ] = ScrapeProtocolProtocol.HTTP
     r"""Protocol to use when collecting metrics"""
 
@@ -471,10 +443,7 @@ class InputEdgePrometheus(BaseModel):
     r"""EC2 Instance Search Filter"""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -490,10 +459,7 @@ class InputEdgePrometheus(BaseModel):
     r"""EC2 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to EC2-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputEdgePrometheusSignatureVersion.V4
     r"""Signature version to use for signing EC2 requests"""
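
The change repeated across these model files is the removal of Speakeasy's open-enum machinery: the `utils.OpenEnumMeta` metaclass, the `validate_open_enum` helper, and the `PlainValidator` wrapper are gone, leaving plain closed `str` enums. The sketch below is illustrative only and is not code from the package: `Compression`, `allow_unknown`, `OldStylePq`, and `NewStylePq` are hypothetical stand-ins built on plain `pydantic.BaseModel`, and it assumes the removed helper behaved as a pass-through for values outside the enum.

# Illustrative sketch only -- not part of cribl-control-plane.
from enum import Enum
from typing import Optional

from pydantic import BaseModel, ValidationError
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):  # hypothetical stand-in enum
    NONE = "none"
    GZIP = "gzip"


def allow_unknown(value: object) -> object:
    # Assumed behavior of the removed validate_open_enum(False) helper:
    # coerce known values to the enum, pass unknown values through untouched.
    try:
        return Compression(value)
    except ValueError:
        return value


class OldStylePq(BaseModel):
    # Pre-0.0.51 shape: an "open" enum field that tolerates unlisted values.
    compress: Annotated[
        Optional[Compression], PlainValidator(allow_unknown)
    ] = Compression.NONE


class NewStylePq(BaseModel):
    # 0.0.51 shape: a plain closed enum field.
    compress: Optional[Compression] = Compression.NONE


print(OldStylePq(compress="zstd").compress)  # "zstd" survives as a bare string
try:
    NewStylePq(compress="zstd")
except ValidationError as err:
    print(err.errors()[0]["type"])  # "enum" -- values outside the enum are rejected

If that reading is right, enum values that 0.0.50rc2 would have passed through untouched should now fail model validation in 0.0.51.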

cribl_control_plane/models/inputelastic.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputElasticConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputElasticMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputElasticCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputElasticPqTypedDict(TypedDict):
 
 
 class InputElasticPq(BaseModel):
-    mode: Annotated[
-        Optional[InputElasticMode], PlainValidator(validate_open_enum(False))
-    ] = InputElasticMode.ALWAYS
+    mode: Optional[InputElasticMode] = InputElasticMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputElasticPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputElasticCompression], PlainValidator(validate_open_enum(False))
-    ] = InputElasticCompression.NONE
+    compress: Optional[InputElasticCompression] = InputElasticCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputElasticPq(BaseModel):
     ] = None
 
 
-class InputElasticMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputElasticMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,30 +162,22 @@ class InputElasticTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputElasticMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputElasticMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputElasticMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputElasticMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
-class InputElasticAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAuthenticationType(str, Enum):
     NONE = "none"
     BASIC = "basic"
     CREDENTIALS_SECRET = "credentialsSecret"
     AUTH_TOKENS = "authTokens"
 
 
-class InputElasticAPIVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAPIVersion(str, Enum):
     r"""The API version to use for communicating with the server"""
 
     SIX_DOT_8_DOT_4 = "6.8.4"
@@ -224,7 +209,7 @@ class InputElasticMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputElasticAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAuthenticationMethod(str, Enum):
     r"""Enter credentials directly, or select a stored secret"""
 
     NONE = "none"
@@ -268,11 +253,7 @@ class InputElasticProxyMode(BaseModel):
     r"""Amount of time, in seconds, to wait for a proxy request to complete before canceling it"""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputElasticAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputElasticAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputElasticAuthenticationMethod.NONE
     r"""Enter credentials directly, or select a stored secret"""
 
@@ -440,18 +421,11 @@ class InputElastic(BaseModel):
     r"""Absolute path on which to listen for Elasticsearch API requests. Defaults to /. _bulk will be appended automatically. For example, /myPath becomes /myPath/_bulk. Requests can then be made to either /myPath/_bulk or /myPath/<myIndexName>/_bulk. Other entries are faked as success."""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputElasticAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputElasticAuthenticationType], pydantic.Field(alias="authType")
     ] = InputElasticAuthenticationType.NONE
 
     api_version: Annotated[
-        Annotated[
-            Optional[InputElasticAPIVersion], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="apiVersion"),
+        Optional[InputElasticAPIVersion], pydantic.Field(alias="apiVersion")
     ] = InputElasticAPIVersion.EIGHT_DOT_3_DOT_2
     r"""The API version to use for communicating with the server"""
 
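
The other repeated edit, visible throughout inputelastic.py above, flattens the nested `Annotated[Annotated[...], pydantic.Field(alias=...)]` wrappers into a single `Annotated` layer that keeps only the alias. Below is a small illustrative sketch of the flattened form, using hypothetical `APIVersion` and `ElasticLike` stand-ins on plain `pydantic.BaseModel` rather than the SDK's own base class; alias-based population behaves the same for known values, while unknown values are rejected as noted earlier.

# Illustrative sketch only -- not part of cribl-control-plane.
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class APIVersion(str, Enum):  # hypothetical stand-in enum
    SIX_DOT_8_DOT_4 = "6.8.4"
    EIGHT_DOT_3_DOT_2 = "8.3.2"


class ElasticLike(pydantic.BaseModel):
    # Mirrors the flattened 0.0.51 pattern: one Annotated layer carrying only the alias.
    api_version: Annotated[
        Optional[APIVersion], pydantic.Field(alias="apiVersion")
    ] = APIVersion.EIGHT_DOT_3_DOT_2


print(ElasticLike(apiVersion="6.8.4").api_version)  # APIVersion.SIX_DOT_8_DOT_4
try:
    ElasticLike(apiVersion="9.9.9")
except pydantic.ValidationError:
    print("unknown apiVersion rejected")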

cribl_control_plane/models/inputeventhub.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputEventhubConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputEventhubMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEventhubMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputEventhubCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEventhubCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputEventhubPqTypedDict(TypedDict):
 
 
 class InputEventhubPq(BaseModel):
-    mode: Annotated[
-        Optional[InputEventhubMode], PlainValidator(validate_open_enum(False))
-    ] = InputEventhubMode.ALWAYS
+    mode: Optional[InputEventhubMode] = InputEventhubMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputEventhubPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputEventhubCompression], PlainValidator(validate_open_enum(False))
-    ] = InputEventhubCompression.NONE
+    compress: Optional[InputEventhubCompression] = InputEventhubCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputEventhubPq(BaseModel):
     ] = None
 
 
-class InputEventhubSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEventhubSASLMechanism(str, Enum):
     PLAIN = "plain"
     OAUTHBEARER = "oauthbearer"
 
@@ -120,9 +113,7 @@ class InputEventhubAuthentication(BaseModel):
 
     disabled: Optional[bool] = False
 
-    mechanism: Annotated[
-        Optional[InputEventhubSASLMechanism], PlainValidator(validate_open_enum(False))
-    ] = InputEventhubSASLMechanism.PLAIN
+    mechanism: Optional[InputEventhubSASLMechanism] = InputEventhubSASLMechanism.PLAIN
 
 
 class InputEventhubTLSSettingsClientSideTypedDict(TypedDict):

cribl_control_plane/models/inputexec.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputExecConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputExecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputExecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputExecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputExecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputExecPqTypedDict(TypedDict):
 
 
 class InputExecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputExecMode], PlainValidator(validate_open_enum(False))
-    ] = InputExecMode.ALWAYS
+    mode: Optional[InputExecMode] = InputExecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputExecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputExecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputExecCompression.NONE
+    compress: Optional[InputExecCompression] = InputExecCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputExecPq(BaseModel):
     ] = None
 
 
-class ScheduleType(str, Enum, metaclass=utils.OpenEnumMeta):
+class ScheduleType(str, Enum):
     r"""Select a schedule type; either an interval (in seconds) or a cron-style schedule."""
 
     INTERVAL = "interval"
@@ -197,8 +190,7 @@ class InputExec(BaseModel):
     r"""Maximum number of retry attempts in the event that the command fails"""
 
     schedule_type: Annotated[
-        Annotated[Optional[ScheduleType], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="scheduleType"),
+        Optional[ScheduleType], pydantic.Field(alias="scheduleType")
     ] = ScheduleType.INTERVAL
     r"""Select a schedule type; either an interval (in seconds) or a cron-style schedule."""
 

cribl_control_plane/models/inputfile.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputFileConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputFilePqMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFilePqMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputFileCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFileCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputFilePqTypedDict(TypedDict):
 
 
 class InputFilePq(BaseModel):
-    mode: Annotated[
-        Optional[InputFilePqMode], PlainValidator(validate_open_enum(False))
-    ] = InputFilePqMode.ALWAYS
+    mode: Optional[InputFilePqMode] = InputFilePqMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputFilePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputFileCompression], PlainValidator(validate_open_enum(False))
-    ] = InputFileCompression.NONE
+    compress: Optional[InputFileCompression] = InputFileCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputFilePq(BaseModel):
     ] = None
 
 
-class InputFileMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFileMode(str, Enum):
     r"""Choose how to discover files to monitor"""
 
     AUTO = "auto"
@@ -207,9 +200,7 @@ class InputFile(BaseModel):
 
     pq: Optional[InputFilePq] = None
 
-    mode: Annotated[
-        Optional[InputFileMode], PlainValidator(validate_open_enum(False))
-    ] = InputFileMode.AUTO
+    mode: Optional[InputFileMode] = InputFileMode.AUTO
     r"""Choose how to discover files to monitor"""
 
     interval: Optional[float] = 10

cribl_control_plane/models/inputfirehose.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputFirehoseConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputFirehoseMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFirehoseMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputFirehoseCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFirehoseCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputFirehosePqTypedDict(TypedDict):
 
 
 class InputFirehosePq(BaseModel):
-    mode: Annotated[
-        Optional[InputFirehoseMode], PlainValidator(validate_open_enum(False))
-    ] = InputFirehoseMode.ALWAYS
+    mode: Optional[InputFirehoseMode] = InputFirehoseMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputFirehosePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputFirehoseCompression], PlainValidator(validate_open_enum(False))
-    ] = InputFirehoseCompression.NONE
+    compress: Optional[InputFirehoseCompression] = InputFirehoseCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputFirehosePq(BaseModel):
     ] = None
 
 
-class InputFirehoseMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFirehoseMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputFirehoseMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputFirehoseMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,19 +162,11 @@ class InputFirehoseTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputFirehoseMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputFirehoseMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputFirehoseMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputFirehoseMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 