cribl-control-plane 0.0.13__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/inputsnmp.py
@@ -0,0 +1,274 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import Any, List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class InputSnmpType(str, Enum, metaclass=utils.OpenEnumMeta):
+     SNMP = "snmp"
+
+
+ class InputSnmpConnectionTypedDict(TypedDict):
+     output: str
+     pipeline: NotRequired[str]
+
+
+ class InputSnmpConnection(BaseModel):
+     output: str
+
+     pipeline: Optional[str] = None
+
+
+ class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+     SMART = "smart"
+     ALWAYS = "always"
+
+
+ class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Codec to use to compress the persisted data"""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class InputSnmpPqTypedDict(TypedDict):
+     mode: NotRequired[InputSnmpMode]
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+     max_buffer_size: NotRequired[float]
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+     commit_frequency: NotRequired[float]
+     r"""The number of events to send downstream before committing that Stream has read them"""
+     max_file_size: NotRequired[str]
+     r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+     max_size: NotRequired[str]
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+     path: NotRequired[str]
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+     compress: NotRequired[InputSnmpCompression]
+     r"""Codec to use to compress the persisted data"""
+
+
+ class InputSnmpPq(BaseModel):
+     mode: Annotated[
+         Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
+     ] = InputSnmpMode.ALWAYS
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+     max_buffer_size: Annotated[
+         Optional[float], pydantic.Field(alias="maxBufferSize")
+     ] = 1000
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+     commit_frequency: Annotated[
+         Optional[float], pydantic.Field(alias="commitFrequency")
+     ] = 42
+     r"""The number of events to send downstream before committing that Stream has read them"""
+
+     max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+         "1 MB"
+     )
+     r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+     max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+     path: Optional[str] = "$CRIBL_HOME/state/queues"
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+     compress: Annotated[
+         Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
+     ] = InputSnmpCompression.NONE
+     r"""Codec to use to compress the persisted data"""
+
+
+ class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+     NONE = "none"
+     MD5 = "md5"
+     SHA = "sha"
+     SHA224 = "sha224"
+     SHA256 = "sha256"
+     SHA384 = "sha384"
+     SHA512 = "sha512"
+
+
+ class V3UserTypedDict(TypedDict):
+     name: str
+     auth_protocol: NotRequired[AuthenticationProtocol]
+     auth_key: NotRequired[Any]
+     priv_protocol: NotRequired[str]
+
+
+ class V3User(BaseModel):
+     name: str
+
+     auth_protocol: Annotated[
+         Annotated[
+             Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
+         ],
+         pydantic.Field(alias="authProtocol"),
+     ] = AuthenticationProtocol.NONE
+
+     auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
+
+     priv_protocol: Annotated[Optional[str], pydantic.Field(alias="privProtocol")] = (
+         "none"
+     )
+
+
+ class SNMPv3AuthenticationTypedDict(TypedDict):
+     r"""Authentication parameters for SNMPv3 trap. Set the log level to debug if you are experiencing authentication or decryption issues."""
+
+     v3_auth_enabled: NotRequired[bool]
+     allow_unmatched_trap: NotRequired[bool]
+     r"""Pass through traps that don't match any of the configured users. @{product} will not attempt to decrypt these traps."""
+     v3_users: NotRequired[List[V3UserTypedDict]]
+     r"""User credentials for receiving v3 traps"""
+
+
+ class SNMPv3Authentication(BaseModel):
+     r"""Authentication parameters for SNMPv3 trap. Set the log level to debug if you are experiencing authentication or decryption issues."""
+
+     v3_auth_enabled: Annotated[
+         Optional[bool], pydantic.Field(alias="v3AuthEnabled")
+     ] = False
+
+     allow_unmatched_trap: Annotated[
+         Optional[bool], pydantic.Field(alias="allowUnmatchedTrap")
+     ] = False
+     r"""Pass through traps that don't match any of the configured users. @{product} will not attempt to decrypt these traps."""
+
+     v3_users: Annotated[Optional[List[V3User]], pydantic.Field(alias="v3Users")] = None
+     r"""User credentials for receiving v3 traps"""
+
+
+ class InputSnmpMetadatumTypedDict(TypedDict):
+     name: str
+     value: str
+     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+ class InputSnmpMetadatum(BaseModel):
+     name: str
+
+     value: str
+     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+ class InputSnmpTypedDict(TypedDict):
+     type: InputSnmpType
+     id: NotRequired[str]
+     r"""Unique ID for this input"""
+     disabled: NotRequired[bool]
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data from this Source before sending it through the Routes"""
+     send_to_routes: NotRequired[bool]
+     r"""Select whether to send data to Routes, or directly to Destinations."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     pq_enabled: NotRequired[bool]
+     r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     connections: NotRequired[List[InputSnmpConnectionTypedDict]]
+     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+     pq: NotRequired[InputSnmpPqTypedDict]
+     host: NotRequired[str]
+     r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+     port: NotRequired[float]
+     r"""UDP port to receive SNMP traps on. Defaults to 162."""
+     snmp_v3_auth: NotRequired[SNMPv3AuthenticationTypedDict]
+     r"""Authentication parameters for SNMPv3 trap. Set the log level to debug if you are experiencing authentication or decryption issues."""
+     max_buffer_size: NotRequired[float]
+     r"""Maximum number of events to buffer when downstream is blocking."""
+     ip_whitelist_regex: NotRequired[str]
+     r"""Regex matching IP addresses that are allowed to send data"""
+     metadata: NotRequired[List[InputSnmpMetadatumTypedDict]]
+     r"""Fields to add to events from this input"""
+     udp_socket_rx_buf_size: NotRequired[float]
+     r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+     varbinds_with_types: NotRequired[bool]
+     r"""If enabled, parses varbinds as an array of objects that include OID, value, and type"""
+     best_effort_parsing: NotRequired[bool]
+     r"""If enabled, the parser will attempt to parse varbind octet strings as UTF-8, first, otherwise will fallback to other methods"""
+     description: NotRequired[str]
+
+
+ class InputSnmp(BaseModel):
+     type: Annotated[InputSnmpType, PlainValidator(validate_open_enum(False))]
+
+     id: Optional[str] = None
+     r"""Unique ID for this input"""
+
+     disabled: Optional[bool] = False
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+     send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+         True
+     )
+     r"""Select whether to send data to Routes, or directly to Destinations."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+     r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     connections: Optional[List[InputSnmpConnection]] = None
+     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+     pq: Optional[InputSnmpPq] = None
+
+     host: Optional[str] = "0.0.0.0"
+     r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+
+     port: Optional[float] = 162
+     r"""UDP port to receive SNMP traps on. Defaults to 162."""
+
+     snmp_v3_auth: Annotated[
+         Optional[SNMPv3Authentication], pydantic.Field(alias="snmpV3Auth")
+     ] = None
+     r"""Authentication parameters for SNMPv3 trap. Set the log level to debug if you are experiencing authentication or decryption issues."""
+
+     max_buffer_size: Annotated[
+         Optional[float], pydantic.Field(alias="maxBufferSize")
+     ] = 1000
+     r"""Maximum number of events to buffer when downstream is blocking."""
+
+     ip_whitelist_regex: Annotated[
+         Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+     ] = "/.*/"
+     r"""Regex matching IP addresses that are allowed to send data"""
+
+     metadata: Optional[List[InputSnmpMetadatum]] = None
+     r"""Fields to add to events from this input"""
+
+     udp_socket_rx_buf_size: Annotated[
+         Optional[float], pydantic.Field(alias="udpSocketRxBufSize")
+     ] = None
+     r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+
+     varbinds_with_types: Annotated[
+         Optional[bool], pydantic.Field(alias="varbindsWithTypes")
+     ] = False
+     r"""If enabled, parses varbinds as an array of objects that include OID, value, and type"""
+
+     best_effort_parsing: Annotated[
+         Optional[bool], pydantic.Field(alias="bestEffortParsing")
+     ] = False
+     r"""If enabled, the parser will attempt to parse varbind octet strings as UTF-8, first, otherwise will fallback to other methods"""
+
+     description: Optional[str] = None
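
For orientation, here is a minimal usage sketch for the generated InputSnmp model above. It assumes the models are re-exported from cribl_control_plane.models (typical for Speakeasy-generated SDKs) and that the generated BaseModel accepts Python field names as well as their wire aliases; all IDs and values below are hypothetical examples, not part of this package.

# Minimal sketch (assumptions noted above): build an SNMP trap Source config.
from cribl_control_plane.models import (
    InputSnmp,
    InputSnmpMode,
    InputSnmpPq,
    InputSnmpType,
)

snmp_source = InputSnmp(
    type=InputSnmpType.SNMP,  # required discriminator for this Source kind
    id="snmp-traps",          # hypothetical input ID
    host="0.0.0.0",           # listen on all IPv4 addresses (the default)
    port=162,                 # standard SNMP trap port (the default)
    pq=InputSnmpPq(mode=InputSnmpMode.SMART),  # queue only under backpressure
)

# The pydantic aliases map snake_case fields back to the wire format
# (for example, maxBufferSize), so a config payload can be produced with:
print(snmp_source.model_dump(by_alias=True, exclude_none=True))
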
cribl_control_plane/models/inputsplunk.py
@@ -0,0 +1,387 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import Any, List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class InputSplunkType(str, Enum, metaclass=utils.OpenEnumMeta):
+     SPLUNK = "splunk"
+
+
+ class InputSplunkConnectionTypedDict(TypedDict):
+     output: str
+     pipeline: NotRequired[str]
+
+
+ class InputSplunkConnection(BaseModel):
+     output: str
+
+     pipeline: Optional[str] = None
+
+
+ class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+     SMART = "smart"
+     ALWAYS = "always"
+
+
+ class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Codec to use to compress the persisted data"""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class InputSplunkPqTypedDict(TypedDict):
+     mode: NotRequired[InputSplunkMode]
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+     max_buffer_size: NotRequired[float]
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+     commit_frequency: NotRequired[float]
+     r"""The number of events to send downstream before committing that Stream has read them"""
+     max_file_size: NotRequired[str]
+     r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+     max_size: NotRequired[str]
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+     path: NotRequired[str]
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+     compress: NotRequired[InputSplunkPqCompression]
+     r"""Codec to use to compress the persisted data"""
+
+
+ class InputSplunkPq(BaseModel):
+     mode: Annotated[
+         Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
+     ] = InputSplunkMode.ALWAYS
+     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+     max_buffer_size: Annotated[
+         Optional[float], pydantic.Field(alias="maxBufferSize")
+     ] = 1000
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+     commit_frequency: Annotated[
+         Optional[float], pydantic.Field(alias="commitFrequency")
+     ] = 42
+     r"""The number of events to send downstream before committing that Stream has read them"""
+
+     max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+         "1 MB"
+     )
+     r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+     max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+     path: Optional[str] = "$CRIBL_HOME/state/queues"
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+     compress: Annotated[
+         Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
+     ] = InputSplunkPqCompression.NONE
+     r"""Codec to use to compress the persisted data"""
+
+
+ class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class InputSplunkTLSSettingsServerSideTypedDict(TypedDict):
+     disabled: NotRequired[bool]
+     certificate_name: NotRequired[str]
+     r"""The name of the predefined certificate"""
+     priv_key_path: NotRequired[str]
+     r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+     passphrase: NotRequired[str]
+     r"""Passphrase to use to decrypt private key"""
+     cert_path: NotRequired[str]
+     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+     ca_path: NotRequired[str]
+     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+     request_cert: NotRequired[bool]
+     r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+     reject_unauthorized: NotRequired[Any]
+     common_name_regex: NotRequired[Any]
+     min_version: NotRequired[InputSplunkMinimumTLSVersion]
+     max_version: NotRequired[InputSplunkMaximumTLSVersion]
+
+
+ class InputSplunkTLSSettingsServerSide(BaseModel):
+     disabled: Optional[bool] = True
+
+     certificate_name: Annotated[
+         Optional[str], pydantic.Field(alias="certificateName")
+     ] = None
+     r"""The name of the predefined certificate"""
+
+     priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+     r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+     passphrase: Optional[str] = None
+     r"""Passphrase to use to decrypt private key"""
+
+     cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+     r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+     ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+     r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+     request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+     r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+     reject_unauthorized: Annotated[
+         Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+     ] = None
+
+     common_name_regex: Annotated[
+         Optional[Any], pydantic.Field(alias="commonNameRegex")
+     ] = None
+
+     min_version: Annotated[
+         Annotated[
+             Optional[InputSplunkMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="minVersion"),
+     ] = None
+
+     max_version: Annotated[
+         Annotated[
+             Optional[InputSplunkMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="maxVersion"),
+     ] = None
+
+
+ class InputSplunkMetadatumTypedDict(TypedDict):
+     name: str
+     value: str
+     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+ class InputSplunkMetadatum(BaseModel):
+     name: str
+
+     value: str
+     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+ class InputSplunkAuthTokenTypedDict(TypedDict):
+     token: str
+     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
+     description: NotRequired[str]
+
+
+ class InputSplunkAuthToken(BaseModel):
+     token: str
+     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
+
+     description: Optional[str] = None
+
+
+ class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""The highest S2S protocol version to advertise during handshake"""
+
+     V3 = "v3"
+     V4 = "v4"
+
+
+ class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
+
+     DISABLED = "disabled"
+     AUTO = "auto"
+     ALWAYS = "always"
+
+
+ class InputSplunkTypedDict(TypedDict):
+     type: InputSplunkType
+     port: float
+     r"""Port to listen on"""
+     id: NotRequired[str]
+     r"""Unique ID for this input"""
+     disabled: NotRequired[bool]
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data from this Source before sending it through the Routes"""
+     send_to_routes: NotRequired[bool]
+     r"""Select whether to send data to Routes, or directly to Destinations."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     pq_enabled: NotRequired[bool]
+     r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     connections: NotRequired[List[InputSplunkConnectionTypedDict]]
+     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+     pq: NotRequired[InputSplunkPqTypedDict]
+     host: NotRequired[str]
+     r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""
+     tls: NotRequired[InputSplunkTLSSettingsServerSideTypedDict]
+     ip_whitelist_regex: NotRequired[str]
+     r"""Regex matching IP addresses that are allowed to establish a connection"""
+     max_active_cxn: NotRequired[float]
+     r"""Maximum number of active connections allowed per Worker Process. Use 0 for unlimited."""
+     socket_idle_timeout: NotRequired[float]
+     r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+     socket_ending_max_wait: NotRequired[float]
+     r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+     socket_max_lifespan: NotRequired[float]
+     r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+     enable_proxy_header: NotRequired[bool]
+     r"""Enable if the connection is proxied by a device that supports proxy protocol v1 or v2"""
+     metadata: NotRequired[List[InputSplunkMetadatumTypedDict]]
+     r"""Fields to add to events from this input"""
+     breaker_rulesets: NotRequired[List[str]]
+     r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+     stale_channel_flush_ms: NotRequired[float]
+     r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+     auth_tokens: NotRequired[List[InputSplunkAuthTokenTypedDict]]
+     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
+     max_s2_sversion: NotRequired[InputSplunkMaxS2SVersion]
+     r"""The highest S2S protocol version to advertise during handshake"""
+     description: NotRequired[str]
+     use_fwd_timezone: NotRequired[bool]
+     r"""Event Breakers will determine events' time zone from UF-provided metadata, when TZ can't be inferred from the raw event"""
+     drop_control_fields: NotRequired[bool]
+     r"""Drop Splunk control fields such as `crcSalt` and `_savedPort`. If disabled, control fields are stored in the internal field `__ctrlFields`."""
+     extract_metrics: NotRequired[bool]
+     r"""Extract and process Splunk-generated metrics as Cribl metrics"""
+     compress: NotRequired[InputSplunkCompression]
+     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
+
+
+ class InputSplunk(BaseModel):
+     type: Annotated[InputSplunkType, PlainValidator(validate_open_enum(False))]
+
+     port: float
+     r"""Port to listen on"""
+
+     id: Optional[str] = None
+     r"""Unique ID for this input"""
+
+     disabled: Optional[bool] = False
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+     send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+         True
+     )
+     r"""Select whether to send data to Routes, or directly to Destinations."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+     r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     connections: Optional[List[InputSplunkConnection]] = None
+     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+     pq: Optional[InputSplunkPq] = None
+
+     host: Optional[str] = "0.0.0.0"
+     r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""
+
+     tls: Optional[InputSplunkTLSSettingsServerSide] = None
+
+     ip_whitelist_regex: Annotated[
+         Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+     ] = "/.*/"
+     r"""Regex matching IP addresses that are allowed to establish a connection"""
+
+     max_active_cxn: Annotated[Optional[float], pydantic.Field(alias="maxActiveCxn")] = (
+         1000
+     )
+     r"""Maximum number of active connections allowed per Worker Process. Use 0 for unlimited."""
+
+     socket_idle_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="socketIdleTimeout")
+     ] = 0
+     r"""How long @{product} should wait before assuming that an inactive socket has timed out. After this time, the connection will be closed. Leave at 0 for no inactive socket monitoring."""
+
+     socket_ending_max_wait: Annotated[
+         Optional[float], pydantic.Field(alias="socketEndingMaxWait")
+     ] = 30
+     r"""How long the server will wait after initiating a closure for a client to close its end of the connection. If the client doesn't close the connection within this time, the server will forcefully terminate the socket to prevent resource leaks and ensure efficient connection cleanup and system stability. Leave at 0 for no inactive socket monitoring."""
+
+     socket_max_lifespan: Annotated[
+         Optional[float], pydantic.Field(alias="socketMaxLifespan")
+     ] = 0
+     r"""The maximum duration a socket can remain open, even if active. This helps manage resources and mitigate issues caused by TCP pinning. Set to 0 to disable."""
+
+     enable_proxy_header: Annotated[
+         Optional[bool], pydantic.Field(alias="enableProxyHeader")
+     ] = False
+     r"""Enable if the connection is proxied by a device that supports proxy protocol v1 or v2"""
+
+     metadata: Optional[List[InputSplunkMetadatum]] = None
+     r"""Fields to add to events from this input"""
+
+     breaker_rulesets: Annotated[
+         Optional[List[str]], pydantic.Field(alias="breakerRulesets")
+     ] = None
+     r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+
+     stale_channel_flush_ms: Annotated[
+         Optional[float], pydantic.Field(alias="staleChannelFlushMs")
+     ] = 10000
+     r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+
+     auth_tokens: Annotated[
+         Optional[List[InputSplunkAuthToken]], pydantic.Field(alias="authTokens")
+     ] = None
+     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
+
+     max_s2_sversion: Annotated[
+         Annotated[
+             Optional[InputSplunkMaxS2SVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="maxS2Sversion"),
+     ] = InputSplunkMaxS2SVersion.V3
+     r"""The highest S2S protocol version to advertise during handshake"""
+
+     description: Optional[str] = None
+
+     use_fwd_timezone: Annotated[
+         Optional[bool], pydantic.Field(alias="useFwdTimezone")
+     ] = True
+     r"""Event Breakers will determine events' time zone from UF-provided metadata, when TZ can't be inferred from the raw event"""
+
+     drop_control_fields: Annotated[
+         Optional[bool], pydantic.Field(alias="dropControlFields")
+     ] = True
+     r"""Drop Splunk control fields such as `crcSalt` and `_savedPort`. If disabled, control fields are stored in the internal field `__ctrlFields`."""
+
+     extract_metrics: Annotated[
+         Optional[bool], pydantic.Field(alias="extractMetrics")
+     ] = False
+     r"""Extract and process Splunk-generated metrics as Cribl metrics"""
+
+     compress: Annotated[
+         Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
+     ] = InputSplunkCompression.DISABLED
+     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""