cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/{outputs.py → destinations.py} +69 -71
- cribl_control_plane/errors/healthstatus_error.py +2 -8
- cribl_control_plane/models/__init__.py +5347 -115
- cribl_control_plane/models/createinputop.py +18216 -2
- cribl_control_plane/models/createoutputop.py +18417 -4
- cribl_control_plane/models/createoutputtestbyidop.py +2 -2
- cribl_control_plane/models/deleteoutputbyidop.py +2 -2
- cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
- cribl_control_plane/models/getoutputbyidop.py +2 -2
- cribl_control_plane/models/getoutputpqbyidop.py +2 -2
- cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
- cribl_control_plane/models/healthstatus.py +4 -7
- cribl_control_plane/models/inputappscope.py +16 -36
- cribl_control_plane/models/inputazureblob.py +8 -19
- cribl_control_plane/models/inputcollection.py +6 -15
- cribl_control_plane/models/inputconfluentcloud.py +22 -45
- cribl_control_plane/models/inputcribl.py +6 -13
- cribl_control_plane/models/inputcriblhttp.py +12 -27
- cribl_control_plane/models/inputcribllakehttp.py +14 -26
- cribl_control_plane/models/inputcriblmetrics.py +6 -14
- cribl_control_plane/models/inputcribltcp.py +12 -27
- cribl_control_plane/models/inputcrowdstrike.py +12 -28
- cribl_control_plane/models/inputdatadogagent.py +12 -28
- cribl_control_plane/models/inputdatagen.py +6 -13
- cribl_control_plane/models/inputedgeprometheus.py +33 -64
- cribl_control_plane/models/inputelastic.py +18 -44
- cribl_control_plane/models/inputeventhub.py +10 -19
- cribl_control_plane/models/inputexec.py +8 -16
- cribl_control_plane/models/inputfile.py +8 -17
- cribl_control_plane/models/inputfirehose.py +12 -27
- cribl_control_plane/models/inputgooglepubsub.py +10 -23
- cribl_control_plane/models/inputgrafana_union.py +39 -81
- cribl_control_plane/models/inputhttp.py +12 -27
- cribl_control_plane/models/inputhttpraw.py +12 -27
- cribl_control_plane/models/inputjournalfiles.py +8 -16
- cribl_control_plane/models/inputkafka.py +18 -45
- cribl_control_plane/models/inputkinesis.py +18 -42
- cribl_control_plane/models/inputkubeevents.py +6 -13
- cribl_control_plane/models/inputkubelogs.py +10 -18
- cribl_control_plane/models/inputkubemetrics.py +10 -18
- cribl_control_plane/models/inputloki.py +14 -33
- cribl_control_plane/models/inputmetrics.py +10 -25
- cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
- cribl_control_plane/models/inputmsk.py +20 -52
- cribl_control_plane/models/inputnetflow.py +8 -15
- cribl_control_plane/models/inputoffice365mgmt.py +18 -37
- cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
- cribl_control_plane/models/inputoffice365service.py +20 -41
- cribl_control_plane/models/inputopentelemetry.py +20 -42
- cribl_control_plane/models/inputprometheus.py +22 -54
- cribl_control_plane/models/inputprometheusrw.py +14 -34
- cribl_control_plane/models/inputrawudp.py +8 -15
- cribl_control_plane/models/inputs3.py +10 -23
- cribl_control_plane/models/inputs3inventory.py +12 -28
- cribl_control_plane/models/inputsecuritylake.py +12 -29
- cribl_control_plane/models/inputsnmp.py +10 -20
- cribl_control_plane/models/inputsplunk.py +16 -37
- cribl_control_plane/models/inputsplunkhec.py +14 -33
- cribl_control_plane/models/inputsplunksearch.py +18 -37
- cribl_control_plane/models/inputsqs.py +14 -31
- cribl_control_plane/models/inputsyslog_union.py +29 -53
- cribl_control_plane/models/inputsystemmetrics.py +26 -50
- cribl_control_plane/models/inputsystemstate.py +10 -18
- cribl_control_plane/models/inputtcp.py +14 -33
- cribl_control_plane/models/inputtcpjson.py +14 -33
- cribl_control_plane/models/inputwef.py +22 -45
- cribl_control_plane/models/inputwindowsmetrics.py +26 -46
- cribl_control_plane/models/inputwineventlogs.py +12 -22
- cribl_control_plane/models/inputwiz.py +12 -25
- cribl_control_plane/models/inputzscalerhec.py +14 -33
- cribl_control_plane/models/listoutputop.py +2 -2
- cribl_control_plane/models/output.py +3 -6
- cribl_control_plane/models/outputazureblob.py +20 -52
- cribl_control_plane/models/outputazuredataexplorer.py +30 -77
- cribl_control_plane/models/outputazureeventhub.py +20 -44
- cribl_control_plane/models/outputazurelogs.py +14 -37
- cribl_control_plane/models/outputclickhouse.py +22 -59
- cribl_control_plane/models/outputcloudwatch.py +12 -33
- cribl_control_plane/models/outputconfluentcloud.py +32 -75
- cribl_control_plane/models/outputcriblhttp.py +18 -46
- cribl_control_plane/models/outputcribllake.py +18 -48
- cribl_control_plane/models/outputcribltcp.py +20 -47
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
- cribl_control_plane/models/outputdatadog.py +22 -50
- cribl_control_plane/models/outputdataset.py +20 -48
- cribl_control_plane/models/outputdefault.py +2 -5
- cribl_control_plane/models/outputdevnull.py +2 -5
- cribl_control_plane/models/outputdiskspool.py +4 -9
- cribl_control_plane/models/outputdls3.py +26 -72
- cribl_control_plane/models/outputdynatracehttp.py +22 -57
- cribl_control_plane/models/outputdynatraceotlp.py +24 -59
- cribl_control_plane/models/outputelastic.py +20 -45
- cribl_control_plane/models/outputelasticcloud.py +14 -40
- cribl_control_plane/models/outputexabeam.py +12 -33
- cribl_control_plane/models/outputfilesystem.py +16 -41
- cribl_control_plane/models/outputgooglechronicle.py +18 -54
- cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
- cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
- cribl_control_plane/models/outputgooglepubsub.py +16 -39
- cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
- cribl_control_plane/models/outputgraphite.py +16 -35
- cribl_control_plane/models/outputhoneycomb.py +14 -37
- cribl_control_plane/models/outputhumiohec.py +18 -47
- cribl_control_plane/models/outputinfluxdb.py +18 -44
- cribl_control_plane/models/outputkafka.py +28 -73
- cribl_control_plane/models/outputkinesis.py +18 -44
- cribl_control_plane/models/outputloki.py +18 -43
- cribl_control_plane/models/outputminio.py +26 -69
- cribl_control_plane/models/outputmsk.py +30 -81
- cribl_control_plane/models/outputnetflow.py +2 -5
- cribl_control_plane/models/outputnewrelic.py +20 -45
- cribl_control_plane/models/outputnewrelicevents.py +16 -45
- cribl_control_plane/models/outputopentelemetry.py +28 -69
- cribl_control_plane/models/outputprometheus.py +14 -37
- cribl_control_plane/models/outputring.py +10 -21
- cribl_control_plane/models/outputrouter.py +2 -5
- cribl_control_plane/models/outputs3.py +28 -72
- cribl_control_plane/models/outputsecuritylake.py +20 -56
- cribl_control_plane/models/outputsentinel.py +20 -49
- cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
- cribl_control_plane/models/outputservicenow.py +26 -64
- cribl_control_plane/models/outputsignalfx.py +16 -39
- cribl_control_plane/models/outputsnmp.py +2 -5
- cribl_control_plane/models/outputsns.py +16 -40
- cribl_control_plane/models/outputsplunk.py +26 -64
- cribl_control_plane/models/outputsplunkhec.py +14 -37
- cribl_control_plane/models/outputsplunklb.py +36 -83
- cribl_control_plane/models/outputsqs.py +18 -45
- cribl_control_plane/models/outputstatsd.py +16 -34
- cribl_control_plane/models/outputstatsdext.py +14 -33
- cribl_control_plane/models/outputsumologic.py +14 -37
- cribl_control_plane/models/outputsyslog.py +26 -60
- cribl_control_plane/models/outputtcpjson.py +22 -54
- cribl_control_plane/models/outputwavefront.py +14 -37
- cribl_control_plane/models/outputwebhook.py +24 -60
- cribl_control_plane/models/outputxsiam.py +16 -37
- cribl_control_plane/models/updateoutputbyidop.py +4 -4
- cribl_control_plane/sdk.py +3 -5
- cribl_control_plane/sources.py +8 -10
- {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
- cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
- cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
- {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
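Most of the per-model diffs below follow a single pattern: the Speakeasy-generated open enums (str/Enum classes built with utils.OpenEnumMeta plus PlainValidator(validate_open_enum(False)) on the annotated fields) become plain closed enums, and the previously required type discriminator becomes an optional field with a None default. A minimal, self-contained sketch of the practical difference, using plain pydantic and stand-in names rather than the SDK's own classes: a closed enum rejects values that are not declared members, which is the behavior the removed open-enum validators appear to have relaxed.

from enum import Enum
from typing import Optional

from pydantic import BaseModel


class InputType(str, Enum):  # stand-in for a generated enum such as InputSplunkHecType
    SPLUNK_HEC = "splunk_hec"


class DemoModel(BaseModel):  # stand-in for a generated source model
    type: Optional[InputType] = None  # 0.0.17 shape: optional, closed enum


print(DemoModel(type="splunk_hec").type)  # InputType.SPLUNK_HEC: declared values validate
try:
    DemoModel(type="no_such_value")  # a closed enum rejects undeclared strings
except Exception as err:
    print(type(err).__name__)  # ValidationError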
cribl_control_plane/models/inputsplunkhec.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSplunkHecType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecType(str, Enum):
     SPLUNK_HEC = "splunk_hec"


@@ -26,14 +23,14 @@ class InputSplunkHecConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkHecPqTypedDict(TypedDict):


 class InputSplunkHecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkHecMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecMode.ALWAYS
+    mode: Optional[InputSplunkHecMode] = InputSplunkHecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputSplunkHecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkHecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecCompression.NONE
+    compress: Optional[InputSplunkHecCompression] = InputSplunkHecCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSplunkHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

     MANUAL = "manual"
@@ -128,11 +121,7 @@ class InputSplunkHecAuthToken(BaseModel):
     token: Any

     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkHecAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkHecAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputSplunkHecAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

@@ -152,14 +141,14 @@ class InputSplunkHecAuthToken(BaseModel):
     r"""Fields to add to events referencing this token"""


-class InputSplunkHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSplunkHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -218,19 +207,11 @@ class InputSplunkHecTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


@@ -248,11 +229,11 @@ class InputSplunkHecMetadatum(BaseModel):


 class InputSplunkHecTypedDict(TypedDict):
-    type: InputSplunkHecType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputSplunkHecType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -321,14 +302,14 @@ class InputSplunkHecTypedDict(TypedDict):


 class InputSplunkHec(BaseModel):
-    type: Annotated[InputSplunkHecType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""

     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputSplunkHecType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
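A sketch of what the inputsplunkhec.py hunks above mean for constructing the model. The import path mirrors the file shown; treating port as the only remaining required field is an assumption based on the visible hunks.

# Sketch only; `port` being the sole required field is inferred from the hunks above.
from cribl_control_plane.models.inputsplunkhec import InputSplunkHec, InputSplunkHecType

hec_in = InputSplunkHec(port=8088)  # 0.0.15 also required type=...; 0.0.17 defaults it to None
hec_in_explicit = InputSplunkHec(port=8088, type=InputSplunkHecType.SPLUNK_HEC)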
cribl_control_plane/models/inputsplunksearch.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSplunkSearchType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchType(str, Enum):
     SPLUNK_SEARCH = "splunk_search"


@@ -26,14 +23,14 @@ class InputSplunkSearchConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkSearchMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkSearchCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkSearchPqTypedDict(TypedDict):


 class InputSplunkSearchPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkSearchMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchMode.ALWAYS
+    mode: Optional[InputSplunkSearchMode] = InputSplunkSearchMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,14 +79,11 @@ class InputSplunkSearchPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkSearchCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSplunkSearchCompression.NONE
+    compress: Optional[InputSplunkSearchCompression] = InputSplunkSearchCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class OutputMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMode(str, Enum):
     r"""Format of the returned output"""

     CSV = "csv"
@@ -124,7 +116,7 @@ class EndpointHeader(BaseModel):
     r"""JavaScript expression to compute the header's value, normally enclosed in backticks (e.g., `${earliest}`). If a constant, use single quotes (e.g., 'earliest'). Values without delimiters (e.g., earliest) are evaluated as strings."""


-class InputSplunkSearchLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchLogLevel(str, Enum):
     r"""Collector runtime log level (verbosity)"""

     ERROR = "error"
@@ -146,7 +138,7 @@ class InputSplunkSearchMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputSplunkSearchRetryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchRetryType(str, Enum):
     r"""The algorithm to use when performing HTTP retries"""

     NONE = "none"
@@ -174,9 +166,7 @@ class InputSplunkSearchRetryRulesTypedDict(TypedDict):


 class InputSplunkSearchRetryRules(BaseModel):
-    type: Annotated[
-        Optional[InputSplunkSearchRetryType], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchRetryType.BACKOFF
+    type: Optional[InputSplunkSearchRetryType] = InputSplunkSearchRetryType.BACKOFF
     r"""The algorithm to use when performing HTTP retries"""

     interval: Optional[float] = 1000
@@ -207,7 +197,7 @@ class InputSplunkSearchRetryRules(BaseModel):
     r"""Retry request when a connection reset (ECONNRESET) error occurs"""


-class InputSplunkSearchAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchAuthenticationType(str, Enum):
     r"""Splunk Search authentication type"""

     NONE = "none"
@@ -249,11 +239,11 @@ class InputSplunkSearchOauthHeader(BaseModel):


 class InputSplunkSearchTypedDict(TypedDict):
-    type: InputSplunkSearchType
     search: str
     r"""Enter Splunk search here. Examples: 'index=myAppLogs level=error channel=myApp' OR '| mstats avg(myStat) as myStat WHERE index=myStatsIndex.'"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputSplunkSearchType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -341,14 +331,14 @@ class InputSplunkSearchTypedDict(TypedDict):


 class InputSplunkSearch(BaseModel):
-    type: Annotated[InputSplunkSearchType, PlainValidator(validate_open_enum(False))]
-
     search: str
     r"""Enter Splunk search here. Examples: 'index=myAppLogs level=error channel=myApp' OR '| mstats avg(myStat) as myStat WHERE index=myStatsIndex.'"""

     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputSplunkSearchType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
@@ -392,10 +382,9 @@ class InputSplunkSearch(BaseModel):
     endpoint: Optional[str] = "/services/search/v2/jobs/export"
     r"""REST API used to create a search"""

-    output_mode: Annotated[
-        Annotated[Optional[OutputMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="outputMode"),
-    ] = OutputMode.JSON
+    output_mode: Annotated[Optional[OutputMode], pydantic.Field(alias="outputMode")] = (
+        OutputMode.JSON
+    )
     r"""Format of the returned output"""

     endpoint_params: Annotated[
@@ -409,11 +398,7 @@ class InputSplunkSearch(BaseModel):
     r"""Optional request headers to send to the endpoint"""

     log_level: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchLogLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="logLevel"),
+        Optional[InputSplunkSearchLogLevel], pydantic.Field(alias="logLevel")
     ] = None
     r"""Collector runtime log level (verbosity)"""

@@ -474,11 +459,7 @@ class InputSplunkSearch(BaseModel):
     r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkSearchAuthenticationType], pydantic.Field(alias="authType")
     ] = InputSplunkSearchAuthenticationType.BASIC
     r"""Splunk Search authentication type"""

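Under the new inputsplunksearch.py shape, search appears to be the only field that must be supplied (an assumption based on the visible hunks); output_mode and auth_type keep the generated defaults shown above.

# Sketch only; assumes `search` is the sole remaining required field.
from cribl_control_plane.models.inputsplunksearch import InputSplunkSearch

job = InputSplunkSearch(search="index=myAppLogs level=error channel=myApp")
print(job.output_mode)  # OutputMode.JSON, the generated default
print(job.auth_type)    # InputSplunkSearchAuthenticationType.BASIC, the generated default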
cribl_control_plane/models/inputsqs.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSqsType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsType(str, Enum):
     SQS = "sqs"


@@ -26,14 +23,14 @@ class InputSqsConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSqsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSqsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSqsPqTypedDict(TypedDict):


 class InputSqsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSqsMode], PlainValidator(validate_open_enum(False))
-    ] = InputSqsMode.ALWAYS
+    mode: Optional[InputSqsMode] = InputSqsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputSqsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSqsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSqsCompression.NONE
+    compress: Optional[InputSqsCompression] = InputSqsCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSqsQueueType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsQueueType(str, Enum):
     r"""The queue type used (or created)"""

     STANDARD = "standard"
     FIFO = "fifo"


-class InputSqsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -105,7 +98,7 @@ class InputSqsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"


-class InputSqsSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsSignatureVersion(str, Enum):
     r"""Signature version to use for signing SQS requests"""

     V2 = "v2"
@@ -126,11 +119,11 @@ class InputSqsMetadatum(BaseModel):


 class InputSqsTypedDict(TypedDict):
-    type: InputSqsType
     queue_name: str
     r"""The name, URL, or ARN of the SQS queue to read events from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can only be evaluated at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputSqsType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -189,14 +182,14 @@ class InputSqsTypedDict(TypedDict):


 class InputSqs(BaseModel):
-    type: Annotated[InputSqsType, PlainValidator(validate_open_enum(False))]
-
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read events from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can only be evaluated at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""

     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputSqsType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
@@ -222,10 +215,7 @@ class InputSqs(BaseModel):
     pq: Optional[InputSqsPq] = None

     queue_type: Annotated[
-        Annotated[
-            Optional[InputSqsQueueType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="queueType"),
+        Optional[InputSqsQueueType], pydantic.Field(alias="queueType")
     ] = InputSqsQueueType.STANDARD
     r"""The queue type used (or created)"""

@@ -238,10 +228,7 @@ class InputSqs(BaseModel):
     r"""Create queue if it does not exist"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputSqsAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSqsAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputSqsAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -257,11 +244,7 @@ class InputSqs(BaseModel):
     r"""SQS service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to SQS-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputSqsSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputSqsSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputSqsSignatureVersion.V4
     r"""Signature version to use for signing SQS requests"""

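For inputsqs.py, a sketch of constructing the model in its 0.0.17 shape. The queueName alias is used so the example does not depend on populate_by_name behavior in the SDK's BaseModel, and treating queue_name as the only remaining required field is an assumption based on the visible hunks.

# Sketch only; the queue name is a JavaScript expression, hence the inner quotes.
from cribl_control_plane.models.inputsqs import InputSqs

sqs_in = InputSqs(queueName="'myQueueName'")
print(sqs_in.queue_type)                 # InputSqsQueueType.STANDARD by default
print(sqs_in.aws_authentication_method)  # InputSqsAuthenticationMethod.AUTO by default
print(sqs_in.signature_version)          # InputSqsSignatureVersion.V4 by default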
cribl_control_plane/models/inputsyslog_union.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional, Union
 from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict


-class InputSyslogType2(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogType2(str, Enum):
     SYSLOG = "syslog"


@@ -26,14 +23,14 @@ class InputSyslogConnection2(BaseModel):
     pipeline: Optional[str] = None


-class InputSyslogMode2(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMode2(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSyslogCompression2(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogCompression2(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSyslogPq2TypedDict(TypedDict):


 class InputSyslogPq2(BaseModel):
-    mode: Annotated[
-        Optional[InputSyslogMode2], PlainValidator(validate_open_enum(False))
-    ] = InputSyslogMode2.ALWAYS
+    mode: Optional[InputSyslogMode2] = InputSyslogMode2.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputSyslogPq2(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSyslogCompression2], PlainValidator(validate_open_enum(False))
-    ] = InputSyslogCompression2.NONE
+    compress: Optional[InputSyslogCompression2] = InputSyslogCompression2.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSyslogMinimumTLSVersion2(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMinimumTLSVersion2(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSyslogMaximumTLSVersion2(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMaximumTLSVersion2(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -156,19 +149,11 @@ class InputSyslogTLSSettingsServerSide2(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSyslogMinimumTLSVersion2],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSyslogMinimumTLSVersion2], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSyslogMaximumTLSVersion2],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSyslogMaximumTLSVersion2], pydantic.Field(alias="maxVersion")
     ] = None


@@ -185,7 +170,7 @@ class InputSyslogMetadatum2(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputSyslog2TypedDict(TypedDict):
+class InputSyslogSyslog2TypedDict(TypedDict):
     type: InputSyslogType2
     tcp_port: float
     r"""Enter TCP port number to listen on. Not required if listening on UDP."""
@@ -249,8 +234,8 @@ class InputSyslog2TypedDict(TypedDict):
     r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""


-class InputSyslog2(BaseModel):
-    type: Annotated[InputSyslogType2, PlainValidator(validate_open_enum(False))]
+class InputSyslogSyslog2(BaseModel):
+    type: InputSyslogType2

     tcp_port: Annotated[float, pydantic.Field(alias="tcpPort")]
     r"""Enter TCP port number to listen on. Not required if listening on UDP."""
@@ -381,7 +366,7 @@ class InputSyslog2(BaseModel):
     r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""


-class InputSyslogType1(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogType1(str, Enum):
     SYSLOG = "syslog"


@@ -396,14 +381,14 @@ class InputSyslogConnection1(BaseModel):
     pipeline: Optional[str] = None


-class InputSyslogMode1(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMode1(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSyslogCompression1(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogCompression1(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -428,9 +413,7 @@ class InputSyslogPq1TypedDict(TypedDict):


 class InputSyslogPq1(BaseModel):
-    mode: Annotated[
-        Optional[InputSyslogMode1], PlainValidator(validate_open_enum(False))
-    ] = InputSyslogMode1.ALWAYS
+    mode: Optional[InputSyslogMode1] = InputSyslogMode1.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -454,20 +437,18 @@ class InputSyslogPq1(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSyslogCompression1], PlainValidator(validate_open_enum(False))
-    ] = InputSyslogCompression1.NONE
+    compress: Optional[InputSyslogCompression1] = InputSyslogCompression1.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSyslogMinimumTLSVersion1(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMinimumTLSVersion1(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSyslogMaximumTLSVersion1(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSyslogMaximumTLSVersion1(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -526,19 +507,11 @@ class InputSyslogTLSSettingsServerSide1(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSyslogMinimumTLSVersion1],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSyslogMinimumTLSVersion1], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSyslogMaximumTLSVersion1],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSyslogMaximumTLSVersion1], pydantic.Field(alias="maxVersion")
     ] = None


@@ -555,7 +528,7 @@ class InputSyslogMetadatum1(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputSyslog1TypedDict(TypedDict):
+class InputSyslogSyslog1TypedDict(TypedDict):
     type: InputSyslogType1
     udp_port: float
     r"""Enter UDP port number to listen on. Not required if listening on TCP."""
@@ -619,8 +592,8 @@ class InputSyslog1TypedDict(TypedDict):
     r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""


-class InputSyslog1(BaseModel):
-    type: Annotated[InputSyslogType1, PlainValidator(validate_open_enum(False))]
+class InputSyslogSyslog1(BaseModel):
+    type: InputSyslogType1

     udp_port: Annotated[float, pydantic.Field(alias="udpPort")]
     r"""Enter UDP port number to listen on. Not required if listening on TCP."""
@@ -752,8 +725,11 @@ class InputSyslog1(BaseModel):


 InputSyslogUnionTypedDict = TypeAliasType(
-    "InputSyslogUnionTypedDict", Union[InputSyslog1TypedDict, InputSyslog2TypedDict]
+    "InputSyslogUnionTypedDict",
+    Union[InputSyslogSyslog1TypedDict, InputSyslogSyslog2TypedDict],
 )


-InputSyslogUnion = TypeAliasType("InputSyslogUnion", Union[InputSyslog1, InputSyslog2])
+InputSyslogUnion = TypeAliasType(
+    "InputSyslogUnion", Union[InputSyslogSyslog1, InputSyslogSyslog2]
+)