cribl-control-plane 0.0.13 (cribl_control_plane-0.0.13-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
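
The single hunk shown below is the new module cribl_control_plane/models/inputsyslog_union.py (+759 lines). For orientation only, a minimal sketch of how the installed wheel exposes it; this assumes the distribution is published under the PyPI name shown in the page title:

# pip install cribl-control-plane==0.0.13   (assumed PyPI name, from the wheel metadata)
import cribl_control_plane.models.inputsyslog_union as inputsyslog_union

# The module defines two variants plus a union alias over them (see the diff below).
print(inputsyslog_union.InputSyslogUnion)
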
--- /dev/null
+++ cribl_control_plane/models/inputsyslog_union.py
@@ -0,0 +1,759 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional, Union
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
+
+
+class InputSyslogType2(str, Enum, metaclass=utils.OpenEnumMeta):
+    SYSLOG = "syslog"
+
+
+class InputSyslogConnection2TypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputSyslogConnection2(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputSyslogMode2(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputSyslogCompression2(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputSyslogPq2TypedDict(TypedDict):
+    mode: NotRequired[InputSyslogMode2]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputSyslogCompression2]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogPq2(BaseModel):
+    mode: Annotated[
+        Optional[InputSyslogMode2], PlainValidator(validate_open_enum(False))
+    ] = InputSyslogMode2.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputSyslogCompression2], PlainValidator(validate_open_enum(False))
+    ] = InputSyslogCompression2.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogMinimumTLSVersion2(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogMaximumTLSVersion2(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogTLSSettingsServerSide2TypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    priv_key_path: NotRequired[str]
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    cert_path: NotRequired[str]
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+    ca_path: NotRequired[str]
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
+    min_version: NotRequired[InputSyslogMinimumTLSVersion2]
+    max_version: NotRequired[InputSyslogMaximumTLSVersion2]
+
+
+class InputSyslogTLSSettingsServerSide2(BaseModel):
+    disabled: Optional[bool] = True
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputSyslogMinimumTLSVersion2],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputSyslogMaximumTLSVersion2],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputSyslogMetadatum2TypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslogMetadatum2(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslog2TypedDict(TypedDict):
+    type: InputSyslogType2
+    tcp_port: float
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputSyslogConnection2TypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputSyslogPq2TypedDict]
+    host: NotRequired[str]
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+    udp_port: NotRequired[float]
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+    max_buffer_size: NotRequired[float]
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+    ip_whitelist_regex: NotRequired[str]
+    r"""Regex matching IP addresses that are allowed to send data"""
+    timestamp_timezone: NotRequired[str]
+    r"""Timezone to assign to timestamps without timezone info"""
+    single_msg_udp_packets: NotRequired[bool]
+    r"""Treat UDP packet data received as full syslog message"""
+    enable_proxy_header: NotRequired[bool]
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+    keep_fields_list: NotRequired[List[str]]
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+    octet_counting: NotRequired[bool]
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+    infer_framing: NotRequired[bool]
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+    strictly_infer_octet_counting: NotRequired[bool]
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+    allow_non_standard_app_name: NotRequired[bool]
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+    max_active_cxn: NotRequired[float]
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+    socket_idle_timeout: NotRequired[float]
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+    socket_ending_max_wait: NotRequired[float]
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+    socket_max_lifespan: NotRequired[float]
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+    tls: NotRequired[InputSyslogTLSSettingsServerSide2TypedDict]
+    metadata: NotRequired[List[InputSyslogMetadatum2TypedDict]]
+    r"""Fields to add to events from this input"""
+    udp_socket_rx_buf_size: NotRequired[float]
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+    enable_load_balancing: NotRequired[bool]
+    r"""Load balance traffic across all Worker Processes"""
+    description: NotRequired[str]
+    enable_enhanced_proxy_header_parsing: NotRequired[bool]
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+class InputSyslog2(BaseModel):
+    type: Annotated[InputSyslogType2, PlainValidator(validate_open_enum(False))]
+
+    tcp_port: Annotated[float, pydantic.Field(alias="tcpPort")]
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputSyslogConnection2]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputSyslogPq2] = None
+
+    host: Optional[str] = "0.0.0.0"
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+
+    udp_port: Annotated[Optional[float], pydantic.Field(alias="udpPort")] = None
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+
+    ip_whitelist_regex: Annotated[
+        Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+    ] = "/.*/"
+    r"""Regex matching IP addresses that are allowed to send data"""
+
+    timestamp_timezone: Annotated[
+        Optional[str], pydantic.Field(alias="timestampTimezone")
+    ] = "local"
+    r"""Timezone to assign to timestamps without timezone info"""
+
+    single_msg_udp_packets: Annotated[
+        Optional[bool], pydantic.Field(alias="singleMsgUdpPackets")
+    ] = False
+    r"""Treat UDP packet data received as full syslog message"""
+
+    enable_proxy_header: Annotated[
+        Optional[bool], pydantic.Field(alias="enableProxyHeader")
+    ] = False
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+
+    keep_fields_list: Annotated[
+        Optional[List[str]], pydantic.Field(alias="keepFieldsList")
+    ] = None
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+
+    octet_counting: Annotated[Optional[bool], pydantic.Field(alias="octetCounting")] = (
+        False
+    )
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+
+    infer_framing: Annotated[Optional[bool], pydantic.Field(alias="inferFraming")] = (
+        True
+    )
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+
+    strictly_infer_octet_counting: Annotated[
+        Optional[bool], pydantic.Field(alias="strictlyInferOctetCounting")
+    ] = True
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+
+    allow_non_standard_app_name: Annotated[
+        Optional[bool], pydantic.Field(alias="allowNonStandardAppName")
+    ] = False
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+
+    max_active_cxn: Annotated[Optional[float], pydantic.Field(alias="maxActiveCxn")] = (
+        1000
+    )
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+
+    socket_idle_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="socketIdleTimeout")
+    ] = 0
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+
+    socket_ending_max_wait: Annotated[
+        Optional[float], pydantic.Field(alias="socketEndingMaxWait")
+    ] = 30
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+
+    socket_max_lifespan: Annotated[
+        Optional[float], pydantic.Field(alias="socketMaxLifespan")
+    ] = 0
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+
+    tls: Optional[InputSyslogTLSSettingsServerSide2] = None
+
+    metadata: Optional[List[InputSyslogMetadatum2]] = None
+    r"""Fields to add to events from this input"""
+
+    udp_socket_rx_buf_size: Annotated[
+        Optional[float], pydantic.Field(alias="udpSocketRxBufSize")
+    ] = None
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+
+    enable_load_balancing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableLoadBalancing")
+    ] = False
+    r"""Load balance traffic across all Worker Processes"""
+
+    description: Optional[str] = None
+
+    enable_enhanced_proxy_header_parsing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableEnhancedProxyHeaderParsing")
+    ] = None
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+class InputSyslogType1(str, Enum, metaclass=utils.OpenEnumMeta):
+    SYSLOG = "syslog"
+
+
+class InputSyslogConnection1TypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputSyslogConnection1(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputSyslogMode1(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputSyslogCompression1(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputSyslogPq1TypedDict(TypedDict):
+    mode: NotRequired[InputSyslogMode1]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputSyslogCompression1]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogPq1(BaseModel):
+    mode: Annotated[
+        Optional[InputSyslogMode1], PlainValidator(validate_open_enum(False))
+    ] = InputSyslogMode1.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputSyslogCompression1], PlainValidator(validate_open_enum(False))
+    ] = InputSyslogCompression1.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputSyslogMinimumTLSVersion1(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogMaximumTLSVersion1(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputSyslogTLSSettingsServerSide1TypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    priv_key_path: NotRequired[str]
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    cert_path: NotRequired[str]
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+    ca_path: NotRequired[str]
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
+    min_version: NotRequired[InputSyslogMinimumTLSVersion1]
+    max_version: NotRequired[InputSyslogMaximumTLSVersion1]
+
+
+class InputSyslogTLSSettingsServerSide1(BaseModel):
+    disabled: Optional[bool] = True
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputSyslogMinimumTLSVersion1],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputSyslogMaximumTLSVersion1],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputSyslogMetadatum1TypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslogMetadatum1(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputSyslog1TypedDict(TypedDict):
+    type: InputSyslogType1
+    udp_port: float
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputSyslogConnection1TypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputSyslogPq1TypedDict]
+    host: NotRequired[str]
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+    tcp_port: NotRequired[float]
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+    max_buffer_size: NotRequired[float]
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+    ip_whitelist_regex: NotRequired[str]
+    r"""Regex matching IP addresses that are allowed to send data"""
+    timestamp_timezone: NotRequired[str]
+    r"""Timezone to assign to timestamps without timezone info"""
+    single_msg_udp_packets: NotRequired[bool]
+    r"""Treat UDP packet data received as full syslog message"""
+    enable_proxy_header: NotRequired[bool]
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+    keep_fields_list: NotRequired[List[str]]
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+    octet_counting: NotRequired[bool]
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+    infer_framing: NotRequired[bool]
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+    strictly_infer_octet_counting: NotRequired[bool]
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+    allow_non_standard_app_name: NotRequired[bool]
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+    max_active_cxn: NotRequired[float]
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+    socket_idle_timeout: NotRequired[float]
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+    socket_ending_max_wait: NotRequired[float]
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+    socket_max_lifespan: NotRequired[float]
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+    tls: NotRequired[InputSyslogTLSSettingsServerSide1TypedDict]
+    metadata: NotRequired[List[InputSyslogMetadatum1TypedDict]]
+    r"""Fields to add to events from this input"""
+    udp_socket_rx_buf_size: NotRequired[float]
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+    enable_load_balancing: NotRequired[bool]
+    r"""Load balance traffic across all Worker Processes"""
+    description: NotRequired[str]
+    enable_enhanced_proxy_header_parsing: NotRequired[bool]
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+class InputSyslog1(BaseModel):
+    type: Annotated[InputSyslogType1, PlainValidator(validate_open_enum(False))]
+
+    udp_port: Annotated[float, pydantic.Field(alias="udpPort")]
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputSyslogConnection1]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputSyslogPq1] = None
+
+    host: Optional[str] = "0.0.0.0"
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+
+    tcp_port: Annotated[Optional[float], pydantic.Field(alias="tcpPort")] = None
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+
+    ip_whitelist_regex: Annotated[
+        Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+    ] = "/.*/"
+    r"""Regex matching IP addresses that are allowed to send data"""
+
+    timestamp_timezone: Annotated[
+        Optional[str], pydantic.Field(alias="timestampTimezone")
+    ] = "local"
+    r"""Timezone to assign to timestamps without timezone info"""
+
+    single_msg_udp_packets: Annotated[
+        Optional[bool], pydantic.Field(alias="singleMsgUdpPackets")
+    ] = False
+    r"""Treat UDP packet data received as full syslog message"""
+
+    enable_proxy_header: Annotated[
+        Optional[bool], pydantic.Field(alias="enableProxyHeader")
+    ] = False
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+
+    keep_fields_list: Annotated[
+        Optional[List[str]], pydantic.Field(alias="keepFieldsList")
+    ] = None
+    r"""Wildcard list of fields to keep from source data; * = ALL (default)"""
+
+    octet_counting: Annotated[Optional[bool], pydantic.Field(alias="octetCounting")] = (
+        False
+    )
+    r"""Enable if incoming messages use octet counting per RFC 6587."""
+
+    infer_framing: Annotated[Optional[bool], pydantic.Field(alias="inferFraming")] = (
+        True
+    )
+    r"""Enable if we should infer the syslog framing of the incoming messages."""
+
+    strictly_infer_octet_counting: Annotated[
+        Optional[bool], pydantic.Field(alias="strictlyInferOctetCounting")
+    ] = True
+    r"""Enable if we should infer octet counting only if the messages comply with RFC 5424."""
+
+    allow_non_standard_app_name: Annotated[
+        Optional[bool], pydantic.Field(alias="allowNonStandardAppName")
+    ] = False
+    r"""Enable if RFC 3164-formatted messages have hyphens in the app name portion of the TAG section. If disabled, only alphanumeric characters and underscores are allowed. Ignored for RFC 5424-formatted messages."""
+
+    max_active_cxn: Annotated[Optional[float], pydantic.Field(alias="maxActiveCxn")] = (
+        1000
+    )
+    r"""Maximum number of active connections allowed per Worker Process for TCP connections. Use 0 for unlimited."""
+
+    socket_idle_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="socketIdleTimeout")
+    ] = 0
+    r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+
+    socket_ending_max_wait: Annotated[
+        Optional[float], pydantic.Field(alias="socketEndingMaxWait")
+    ] = 30
+    r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+
+    socket_max_lifespan: Annotated[
+        Optional[float], pydantic.Field(alias="socketMaxLifespan")
+    ] = 0
+    r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+
+    tls: Optional[InputSyslogTLSSettingsServerSide1] = None
+
+    metadata: Optional[List[InputSyslogMetadatum1]] = None
+    r"""Fields to add to events from this input"""
+
+    udp_socket_rx_buf_size: Annotated[
+        Optional[float], pydantic.Field(alias="udpSocketRxBufSize")
+    ] = None
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+
+    enable_load_balancing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableLoadBalancing")
+    ] = False
+    r"""Load balance traffic across all Worker Processes"""
+
+    description: Optional[str] = None
+
+    enable_enhanced_proxy_header_parsing: Annotated[
+        Optional[bool], pydantic.Field(alias="enableEnhancedProxyHeaderParsing")
+    ] = None
+    r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
+
+
+InputSyslogUnionTypedDict = TypeAliasType(
+    "InputSyslogUnionTypedDict", Union[InputSyslog1TypedDict, InputSyslog2TypedDict]
+)
+
+
+InputSyslogUnion = TypeAliasType("InputSyslogUnion", Union[InputSyslog1, InputSyslog2])
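
A minimal usage sketch of the generated union, not taken from the package itself: it assumes the generated BaseModel is a Pydantic v2 model (consistent with the pydantic imports above), so pydantic.TypeAdapter can pick between the UDP-required variant (InputSyslog1) and the TCP-required variant (InputSyslog2). The payload values are illustrative only.

from pydantic import TypeAdapter

from cribl_control_plane.models.inputsyslog_union import InputSyslogUnion

# Hypothetical syslog Source config: "udpPort" satisfies InputSyslog1's required
# field, while InputSyslog2 requires "tcpPort", so validation should resolve the
# union to InputSyslog1.
raw = {"type": "syslog", "udpPort": 9514, "host": "0.0.0.0"}

source = TypeAdapter(InputSyslogUnion).validate_python(raw)
print(type(source).__name__)                                # expected: InputSyslog1
print(source.model_dump(by_alias=True, exclude_none=True))  # standard Pydantic v2 dump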