cribl-control-plane 0.0.37a1__py3-none-any.whl → 0.0.38__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane has been flagged as potentially problematic by the registry scanner.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/commits.py +56 -46
- cribl_control_plane/commits_files.py +12 -12
- cribl_control_plane/models/__init__.py +533 -215
- cribl_control_plane/models/createversioncommitop.py +26 -1
- cribl_control_plane/models/createversionrevertop.py +4 -2
- cribl_control_plane/models/createversionundoop.py +4 -2
- cribl_control_plane/models/getpacksbyidop.py +37 -0
- cribl_control_plane/models/getversioncountop.py +3 -2
- cribl_control_plane/models/getversiondiffop.py +3 -2
- cribl_control_plane/models/getversionfilesop.py +3 -2
- cribl_control_plane/models/getversionop.py +4 -2
- cribl_control_plane/models/getversionshowop.py +3 -2
- cribl_control_plane/models/getversionstatusop.py +4 -2
- cribl_control_plane/models/gitrevertparams.py +3 -3
- cribl_control_plane/models/input.py +77 -76
- cribl_control_plane/models/inputcrowdstrike.py +2 -2
- cribl_control_plane/models/inputgrafana.py +678 -67
- cribl_control_plane/models/inputs3.py +2 -2
- cribl_control_plane/models/inputs3inventory.py +2 -2
- cribl_control_plane/models/inputsecuritylake.py +2 -2
- cribl_control_plane/models/inputsyslog.py +406 -37
- cribl_control_plane/models/inputwizwebhook.py +393 -0
- cribl_control_plane/models/output.py +75 -77
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +1 -1
- cribl_control_plane/models/outputgooglepubsub.py +7 -28
- cribl_control_plane/models/outputgrafanacloud.py +565 -69
- cribl_control_plane/models/packinfo.py +5 -5
- cribl_control_plane/models/packinstallinfo.py +5 -5
- cribl_control_plane/models/packrequestbody_union.py +140 -0
- cribl_control_plane/models/packupgraderequest.py +26 -0
- cribl_control_plane/models/updatepacksbyidop.py +9 -28
- cribl_control_plane/packs.py +236 -116
- cribl_control_plane/statuses.py +6 -6
- {cribl_control_plane-0.0.37a1.dist-info → cribl_control_plane-0.0.38.dist-info}/METADATA +4 -3
- {cribl_control_plane-0.0.37a1.dist-info → cribl_control_plane-0.0.38.dist-info}/RECORD +37 -34
- cribl_control_plane/models/packrequestbody.py +0 -75
- {cribl_control_plane-0.0.37a1.dist-info → cribl_control_plane-0.0.38.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputs3.py

@@ -196,7 +196,7 @@ class InputS3TypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]

@@ -337,7 +337,7 @@ class InputS3(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""

     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputs3inventory.py

@@ -201,7 +201,7 @@ class InputS3InventoryTypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]

@@ -346,7 +346,7 @@ class InputS3Inventory(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""

     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputsecuritylake.py

@@ -201,7 +201,7 @@ class InputSecurityLakeTypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]

@@ -342,7 +342,7 @@ class InputSecurityLake(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""

     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputsyslog.py

@@ -4,41 +4,41 @@ from __future__ import annotations
 from cribl_control_plane.types import BaseModel
 from enum import Enum
 import pydantic
-from typing import Any, List, Optional
-from typing_extensions import Annotated, NotRequired, TypedDict
+from typing import Any, List, Optional, Union
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict


-class
+class InputSyslogType2(str, Enum):
     SYSLOG = "syslog"


-class
+class InputSyslogConnection2TypedDict(TypedDict):
     output: str
     pipeline: NotRequired[str]


-class
+class InputSyslogConnection2(BaseModel):
     output: str

     pipeline: Optional[str] = None


-class
+class InputSyslogMode2(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class
+class InputSyslogCompression2(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class
-    mode: NotRequired[
+class InputSyslogPq2TypedDict(TypedDict):
+    mode: NotRequired[InputSyslogMode2]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
     max_buffer_size: NotRequired[float]
     r"""The maximum number of events to hold in memory before writing the events to disk"""

@@ -50,12 +50,12 @@ class InputSyslogPqTypedDict(TypedDict):
     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
     path: NotRequired[str]
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
-    compress: NotRequired[
+    compress: NotRequired[InputSyslogCompression2]
     r"""Codec to use to compress the persisted data"""


-class
-    mode: Optional[
+class InputSyslogPq2(BaseModel):
+    mode: Optional[InputSyslogMode2] = InputSyslogMode2.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[

@@ -79,25 +79,25 @@ class InputSyslogPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[
+    compress: Optional[InputSyslogCompression2] = InputSyslogCompression2.NONE
     r"""Codec to use to compress the persisted data"""


-class
+class InputSyslogMinimumTLSVersion2(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class
+class InputSyslogMaximumTLSVersion2(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class
+class InputSyslogTLSSettingsServerSide2TypedDict(TypedDict):
     disabled: NotRequired[bool]
     certificate_name: NotRequired[str]
     r"""The name of the predefined certificate"""
@@ -113,11 +113,11 @@ class InputSyslogTLSSettingsServerSideTypedDict(TypedDict):
     r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
     reject_unauthorized: NotRequired[Any]
     common_name_regex: NotRequired[Any]
-    min_version: NotRequired[
-    max_version: NotRequired[
+    min_version: NotRequired[InputSyslogMinimumTLSVersion2]
+    max_version: NotRequired[InputSyslogMaximumTLSVersion2]


-class
+class InputSyslogTLSSettingsServerSide2(BaseModel):
     disabled: Optional[bool] = True

     certificate_name: Annotated[

@@ -149,29 +149,31 @@ class InputSyslogTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Optional[
+        Optional[InputSyslogMinimumTLSVersion2], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Optional[
+        Optional[InputSyslogMaximumTLSVersion2], pydantic.Field(alias="maxVersion")
     ] = None


-class
+class InputSyslogMetadatum2TypedDict(TypedDict):
     name: str
     value: str
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class
+class InputSyslogMetadatum2(BaseModel):
     name: str

     value: str
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class
-    type:
+class InputSyslogSyslog2TypedDict(TypedDict):
+    type: InputSyslogType2
+    tcp_port: float
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
     disabled: NotRequired[bool]
@@ -185,15 +187,13 @@ class InputSyslogTypedDict(TypedDict):
     r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
     streamtags: NotRequired[List[str]]
     r"""Tags for filtering and grouping in @{product}"""
-    connections: NotRequired[List[
+    connections: NotRequired[List[InputSyslogConnection2TypedDict]]
     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
-    pq: NotRequired[
+    pq: NotRequired[InputSyslogPq2TypedDict]
     host: NotRequired[str]
     r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
     udp_port: NotRequired[float]
     r"""Enter UDP port number to listen on. Not required if listening on TCP."""
-    tcp_port: NotRequired[float]
-    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
     max_buffer_size: NotRequired[float]
     r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
     ip_whitelist_regex: NotRequired[str]

@@ -222,8 +222,8 @@ class InputSyslogTypedDict(TypedDict):
     r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
     socket_max_lifespan: NotRequired[float]
     r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
-    tls: NotRequired[
-    metadata: NotRequired[List[
+    tls: NotRequired[InputSyslogTLSSettingsServerSide2TypedDict]
+    metadata: NotRequired[List[InputSyslogMetadatum2TypedDict]]
     r"""Fields to add to events from this input"""
     udp_socket_rx_buf_size: NotRequired[float]
     r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""

@@ -234,8 +234,11 @@ class InputSyslogTypedDict(TypedDict):
     r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""


-class
-    type:
+class InputSyslogSyslog2(BaseModel):
+    type: InputSyslogType2
+
+    tcp_port: Annotated[float, pydantic.Field(alias="tcpPort")]
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""

     id: Optional[str] = None
     r"""Unique ID for this input"""
@@ -259,10 +262,10 @@ class InputSyslog(BaseModel):
     streamtags: Optional[List[str]] = None
     r"""Tags for filtering and grouping in @{product}"""

-    connections: Optional[List[
+    connections: Optional[List[InputSyslogConnection2]] = None
     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""

-    pq: Optional[
+    pq: Optional[InputSyslogPq2] = None

     host: Optional[str] = "0.0.0.0"
     r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""

@@ -270,6 +273,361 @@ class InputSyslog(BaseModel):
     udp_port: Annotated[Optional[float], pydantic.Field(alias="udpPort")] = None
     r"""Enter UDP port number to listen on. Not required if listening on TCP."""

+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+
+    ip_whitelist_regex: Annotated[
+        Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+    ] = "/.*/"
+    r"""Regex matching IP addresses that are allowed to send data"""
+
+    timestamp_timezone: Annotated[
+        Optional[str], pydantic.Field(alias="timestampTimezone")
+    ] = "local"
+    r"""Timezone to assign to timestamps without timezone info"""
+
+    single_msg_udp_packets: Annotated[
+        Optional[bool], pydantic.Field(alias="singleMsgUdpPackets")
+    ] = False
+    r"""Treat UDP packet data received as full syslog message"""
+
+    enable_proxy_header: Annotated[
+        Optional[bool], pydantic.Field(alias="enableProxyHeader")
+    ] = False
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+
+    keep_fields_list: Annotated[
+        Optional[List[str]], pydantic.Field(alias="keepFieldsList")
+    ] = None
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+
+    octet_counting: Annotated[Optional[bool], pydantic.Field(alias="octetCounting")] = (
+        False
+    )
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+
+    infer_framing: Annotated[Optional[bool], pydantic.Field(alias="inferFraming")] = (
+        True
+    )
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+
+    strictly_infer_octet_counting: Annotated[
+        Optional[bool], pydantic.Field(alias="strictlyInferOctetCounting")
+    ] = True
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+
+    allow_non_standard_app_name: Annotated[
+        Optional[bool], pydantic.Field(alias="allowNonStandardAppName")
+    ] = False
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+
+    max_active_cxn: Annotated[Optional[float], pydantic.Field(alias="maxActiveCxn")] = (
+        1000
+    )
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+
+    socket_idle_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="socketIdleTimeout")
+    ] = 0
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+
+    socket_ending_max_wait: Annotated[
+        Optional[float], pydantic.Field(alias="socketEndingMaxWait")
+    ] = 30
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+
+    socket_max_lifespan: Annotated[
+        Optional[float], pydantic.Field(alias="socketMaxLifespan")
+    ] = 0
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+
+    tls: Optional[InputSyslogTLSSettingsServerSide2] = None
+
+    metadata: Optional[List[InputSyslogMetadatum2]] = None
+    r"""Fields to add to events from this input"""
+
+    udp_socket_rx_buf_size: Annotated[
+        Optional[float], pydantic.Field(alias="udpSocketRxBufSize")
+    ] = None
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+
+    enable_load_balancing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableLoadBalancing")
+    ] = False
+    r"""Load balance traffic across all Worker Processes"""
+
+    description: Optional[str] = None
+
+    enable_enhanced_proxy_header_parsing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableEnhancedProxyHeaderParsing")
+    ] = None
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+class InputSyslogType1(str, Enum):
+    SYSLOG = "syslog"
+
+
+class InputSyslogConnection1TypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputSyslogConnection1(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputSyslogMode1(str, Enum):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputSyslogCompression1(str, Enum):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputSyslogPq1TypedDict(TypedDict):
+    mode: NotRequired[InputSyslogMode1]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputSyslogCompression1]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogPq1(BaseModel):
+    mode: Optional[InputSyslogMode1] = InputSyslogMode1.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Optional[InputSyslogCompression1] = InputSyslogCompression1.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogMinimumTLSVersion1(str, Enum):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogMaximumTLSVersion1(str, Enum):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogTLSSettingsServerSide1TypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    priv_key_path: NotRequired[str]
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    cert_path: NotRequired[str]
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+    ca_path: NotRequired[str]
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
+    min_version: NotRequired[InputSyslogMinimumTLSVersion1]
+    max_version: NotRequired[InputSyslogMaximumTLSVersion1]
+
+
+class InputSyslogTLSSettingsServerSide1(BaseModel):
+    disabled: Optional[bool] = True
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
+    min_version: Annotated[
+        Optional[InputSyslogMinimumTLSVersion1], pydantic.Field(alias="minVersion")
+    ] = None
+
+    max_version: Annotated[
+        Optional[InputSyslogMaximumTLSVersion1], pydantic.Field(alias="maxVersion")
+    ] = None
+
+
+class InputSyslogMetadatum1TypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslogMetadatum1(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslogSyslog1TypedDict(TypedDict):
+    type: InputSyslogType1
+    udp_port: float
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputSyslogConnection1TypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputSyslogPq1TypedDict]
+    host: NotRequired[str]
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+    tcp_port: NotRequired[float]
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+    max_buffer_size: NotRequired[float]
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+    ip_whitelist_regex: NotRequired[str]
+    r"""Regex matching IP addresses that are allowed to send data"""
+    timestamp_timezone: NotRequired[str]
+    r"""Timezone to assign to timestamps without timezone info"""
+    single_msg_udp_packets: NotRequired[bool]
+    r"""Treat UDP packet data received as full syslog message"""
+    enable_proxy_header: NotRequired[bool]
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+    keep_fields_list: NotRequired[List[str]]
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+    octet_counting: NotRequired[bool]
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+    infer_framing: NotRequired[bool]
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+    strictly_infer_octet_counting: NotRequired[bool]
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+    allow_non_standard_app_name: NotRequired[bool]
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+    max_active_cxn: NotRequired[float]
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+    socket_idle_timeout: NotRequired[float]
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+    socket_ending_max_wait: NotRequired[float]
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+    socket_max_lifespan: NotRequired[float]
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+    tls: NotRequired[InputSyslogTLSSettingsServerSide1TypedDict]
+    metadata: NotRequired[List[InputSyslogMetadatum1TypedDict]]
+    r"""Fields to add to events from this input"""
+    udp_socket_rx_buf_size: NotRequired[float]
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+    enable_load_balancing: NotRequired[bool]
+    r"""Load balance traffic across all Worker Processes"""
+    description: NotRequired[str]
+    enable_enhanced_proxy_header_parsing: NotRequired[bool]
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+class InputSyslogSyslog1(BaseModel):
+    type: InputSyslogType1
+
+    udp_port: Annotated[float, pydantic.Field(alias="udpPort")]
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputSyslogConnection1]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputSyslogPq1] = None
+
+    host: Optional[str] = "0.0.0.0"
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+
     tcp_port: Annotated[Optional[float], pydantic.Field(alias="tcpPort")] = None
     r"""Enter TCP port number to listen on. Not required if listening on UDP."""

@@ -343,9 +701,9 @@ class InputSyslog(BaseModel):
     ] = 0
     r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""

-    tls: Optional[
+    tls: Optional[InputSyslogTLSSettingsServerSide1] = None

-    metadata: Optional[List[
+    metadata: Optional[List[InputSyslogMetadatum1]] = None
     r"""Fields to add to events from this input"""

     udp_socket_rx_buf_size: Annotated[

@@ -364,3 +722,14 @@ class InputSyslog(BaseModel):
         Optional[bool], pydantic.Field(alias="enableEnhancedProxyHeaderParsing")
     ] = None
     r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+InputSyslogTypedDict = TypeAliasType(
+    "InputSyslogTypedDict",
+    Union[InputSyslogSyslog1TypedDict, InputSyslogSyslog2TypedDict],
+)
+
+
+InputSyslog = TypeAliasType(
+    "InputSyslog", Union[InputSyslogSyslog1, InputSyslogSyslog2]
+)