cribl_control_plane-0.0.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of cribl-control-plane has been flagged as a potentially problematic release.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/inputconfluentcloud.py
@@ -0,0 +1,585 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class InputConfluentCloudType(str, Enum, metaclass=utils.OpenEnumMeta):
+    CONFLUENT_CLOUD = "confluent_cloud"
+
+
+class InputConfluentCloudConnectionTypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputConfluentCloudConnection(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputConfluentCloudPqTypedDict(TypedDict):
+    mode: NotRequired[InputConfluentCloudMode]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputConfluentCloudCompression]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputConfluentCloudPq(BaseModel):
+    mode: Annotated[
+        Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
+    ] = InputConfluentCloudMode.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputConfluentCloudCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputConfluentCloudCompression.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputConfluentCloudTLSSettingsClientSideTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    reject_unauthorized: NotRequired[bool]
+    r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+    trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+    """
+    servername: NotRequired[str]
+    r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    ca_path: NotRequired[str]
+    r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+    priv_key_path: NotRequired[str]
+    r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+    cert_path: NotRequired[str]
+    r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    min_version: NotRequired[InputConfluentCloudMinimumTLSVersion]
+    max_version: NotRequired[InputConfluentCloudMaximumTLSVersion]
+
+
+class InputConfluentCloudTLSSettingsClientSide(BaseModel):
+    disabled: Optional[bool] = False
+
+    reject_unauthorized: Annotated[
+        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+    ] = True
+    r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+    trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+    """
+
+    servername: Optional[str] = None
+    r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputConfluentCloudAuthTypedDict(TypedDict):
+    r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+    disabled: NotRequired[bool]
+    credentials_secret: NotRequired[str]
+    r"""Select or create a secret that references your credentials"""
+
+
+class InputConfluentCloudAuth(BaseModel):
+    r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+    disabled: Optional[bool] = True
+
+    credentials_secret: Annotated[
+        Optional[str], pydantic.Field(alias="credentialsSecret")
+    ] = None
+    r"""Select or create a secret that references your credentials"""
+
+
+class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSideTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    reject_unauthorized: NotRequired[bool]
+    r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+    trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+    """
+    servername: NotRequired[str]
+    r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    ca_path: NotRequired[str]
+    r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+    priv_key_path: NotRequired[str]
+    r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+    cert_path: NotRequired[str]
+    r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    min_version: NotRequired[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion]
+    max_version: NotRequired[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion]
+
+
+class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
+    disabled: Optional[bool] = True
+
+    reject_unauthorized: Annotated[
+        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+    ] = True
+    r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+    trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+    """
+
+    servername: Optional[str] = None
+    r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    schema_registry_url: NotRequired[str]
+    r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    connection_timeout: NotRequired[float]
+    r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
+    request_timeout: NotRequired[float]
+    r"""Maximum time to wait for the Schema Registry to respond to a request"""
+    max_retries: NotRequired[float]
+    r"""Maximum number of times to try fetching schemas from the Schema Registry"""
+    auth: NotRequired[InputConfluentCloudAuthTypedDict]
+    r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+    tls: NotRequired[
+        InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSideTypedDict
+    ]
+
+
+class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
+    disabled: Optional[bool] = True
+
+    schema_registry_url: Annotated[
+        Optional[str], pydantic.Field(alias="schemaRegistryURL")
+    ] = "http://localhost:8081"
+    r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+
+    connection_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="connectionTimeout")
+    ] = 30000
+    r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
+
+    request_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="requestTimeout")
+    ] = 30000
+    r"""Maximum time to wait for the Schema Registry to respond to a request"""
+
+    max_retries: Annotated[Optional[float], pydantic.Field(alias="maxRetries")] = 1
+    r"""Maximum number of times to try fetching schemas from the Schema Registry"""
+
+    auth: Optional[InputConfluentCloudAuth] = None
+    r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+    tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
+
+
+class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+    PLAIN = "plain"
+    SCRAM_SHA_256 = "scram-sha-256"
+    SCRAM_SHA_512 = "scram-sha-512"
+    KERBEROS = "kerberos"
+
+
+class InputConfluentCloudAuthenticationTypedDict(TypedDict):
+    r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+    disabled: NotRequired[bool]
+    mechanism: NotRequired[InputConfluentCloudSASLMechanism]
+
+
+class InputConfluentCloudAuthentication(BaseModel):
+    r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+    disabled: Optional[bool] = True
+
+    mechanism: Annotated[
+        Optional[InputConfluentCloudSASLMechanism],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputConfluentCloudSASLMechanism.PLAIN
+
+
+class InputConfluentCloudMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputConfluentCloudMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputConfluentCloudTypedDict(TypedDict):
+    type: InputConfluentCloudType
+    brokers: List[str]
+    r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
+    topics: List[str]
+    r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputConfluentCloudConnectionTypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputConfluentCloudPqTypedDict]
+    tls: NotRequired[InputConfluentCloudTLSSettingsClientSideTypedDict]
+    group_id: NotRequired[str]
+    r"""The consumer group to which this instance belongs. Defaults to 'Cribl'."""
+    from_beginning: NotRequired[bool]
+    r"""Leave enabled if you want the Source, upon first subscribing to a topic, to read starting with the earliest available message"""
+    kafka_schema_registry: NotRequired[
+        InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict
+    ]
+    connection_timeout: NotRequired[float]
+    r"""Maximum time to wait for a connection to complete successfully"""
+    request_timeout: NotRequired[float]
+    r"""Maximum time to wait for Kafka to respond to a request"""
+    max_retries: NotRequired[float]
+    r"""If messages are failing, you can set the maximum number of retries as high as 100 to prevent loss of data"""
+    max_back_off: NotRequired[float]
+    r"""The maximum wait time for a retry, in milliseconds. Default (and minimum) is 30,000 ms (30 seconds); maximum is 180,000 ms (180 seconds)."""
+    initial_backoff: NotRequired[float]
+    r"""Initial value used to calculate the retry, in milliseconds. Maximum is 600,000 ms (10 minutes)."""
+    backoff_rate: NotRequired[float]
+    r"""Set the backoff multiplier (2-20) to control the retry frequency for failed messages. For faster retries, use a lower multiplier. For slower retries with more delay between attempts, use a higher multiplier. The multiplier is used in an exponential backoff formula; see the Kafka [documentation](https://kafka.js.org/docs/retry-detailed) for details."""
+    authentication_timeout: NotRequired[float]
+    r"""Maximum time to wait for Kafka to respond to an authentication request"""
+    reauthentication_threshold: NotRequired[float]
+    r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""
+    sasl: NotRequired[InputConfluentCloudAuthenticationTypedDict]
+    r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+    session_timeout: NotRequired[float]
+    r"""Timeout used to detect client failures when using Kafka's group-management facilities.
+    If the client sends no heartbeats to the broker before the timeout expires,
+    the broker will remove the client from the group and initiate a rebalance.
+    Value must be between the broker's configured group.min.session.timeout.ms and group.max.session.timeout.ms.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#consumerconfigs_session.timeout.ms) for details.
+    """
+    rebalance_timeout: NotRequired[float]
+    r"""Maximum allowed time for each worker to join the group after a rebalance begins.
+    If the timeout is exceeded, the coordinator broker will remove the worker from the group.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#connectconfigs_rebalance.timeout.ms) for details.
+    """
+    heartbeat_interval: NotRequired[float]
+    r"""Expected time between heartbeats to the consumer coordinator when using Kafka's group-management facilities.
+    Value must be lower than sessionTimeout and typically should not exceed 1/3 of the sessionTimeout value.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#consumerconfigs_heartbeat.interval.ms) for details.
+    """
+    auto_commit_interval: NotRequired[float]
+    r"""How often to commit offsets. If both this and Offset commit threshold are set, @{product} commits offsets when either condition is met. If both are empty, @{product} commits offsets after each batch."""
+    auto_commit_threshold: NotRequired[float]
+    r"""How many events are needed to trigger an offset commit. If both this and Offset commit interval are set, @{product} commits offsets when either condition is met. If both are empty, @{product} commits offsets after each batch."""
+    max_bytes_per_partition: NotRequired[float]
+    r"""Maximum amount of data that Kafka will return per partition, per fetch request. Must equal or exceed the maximum message size (maxBytesPerPartition) that Kafka is configured to allow. Otherwise, @{product} can get stuck trying to retrieve messages. Defaults to 1048576 (1 MB)."""
+    max_bytes: NotRequired[float]
+    r"""Maximum number of bytes that Kafka will return per fetch request. Defaults to 10485760 (10 MB)."""
+    max_socket_errors: NotRequired[float]
+    r"""Maximum number of network errors before the consumer re-creates a socket"""
+    metadata: NotRequired[List[InputConfluentCloudMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    description: NotRequired[str]
+
+
+class InputConfluentCloud(BaseModel):
+    type: Annotated[InputConfluentCloudType, PlainValidator(validate_open_enum(False))]
+
+    brokers: List[str]
+    r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
+
+    topics: List[str]
+    r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputConfluentCloudConnection]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputConfluentCloudPq] = None
+
+    tls: Optional[InputConfluentCloudTLSSettingsClientSide] = None
+
+    group_id: Annotated[Optional[str], pydantic.Field(alias="groupId")] = "Cribl"
+    r"""The consumer group to which this instance belongs. Defaults to 'Cribl'."""
+
+    from_beginning: Annotated[Optional[bool], pydantic.Field(alias="fromBeginning")] = (
+        True
+    )
+    r"""Leave enabled if you want the Source, upon first subscribing to a topic, to read starting with the earliest available message"""
+
+    kafka_schema_registry: Annotated[
+        Optional[InputConfluentCloudKafkaSchemaRegistryAuthentication],
+        pydantic.Field(alias="kafkaSchemaRegistry"),
+    ] = None
+
+    connection_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="connectionTimeout")
+    ] = 10000
+    r"""Maximum time to wait for a connection to complete successfully"""
+
+    request_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="requestTimeout")
+    ] = 60000
+    r"""Maximum time to wait for Kafka to respond to a request"""
+
+    max_retries: Annotated[Optional[float], pydantic.Field(alias="maxRetries")] = 5
+    r"""If messages are failing, you can set the maximum number of retries as high as 100 to prevent loss of data"""
+
+    max_back_off: Annotated[Optional[float], pydantic.Field(alias="maxBackOff")] = 30000
+    r"""The maximum wait time for a retry, in milliseconds. Default (and minimum) is 30,000 ms (30 seconds); maximum is 180,000 ms (180 seconds)."""
+
+    initial_backoff: Annotated[
+        Optional[float], pydantic.Field(alias="initialBackoff")
+    ] = 300
+    r"""Initial value used to calculate the retry, in milliseconds. Maximum is 600,000 ms (10 minutes)."""
+
+    backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+    r"""Set the backoff multiplier (2-20) to control the retry frequency for failed messages. For faster retries, use a lower multiplier. For slower retries with more delay between attempts, use a higher multiplier. The multiplier is used in an exponential backoff formula; see the Kafka [documentation](https://kafka.js.org/docs/retry-detailed) for details."""
+
+    authentication_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="authenticationTimeout")
+    ] = 10000
+    r"""Maximum time to wait for Kafka to respond to an authentication request"""
+
+    reauthentication_threshold: Annotated[
+        Optional[float], pydantic.Field(alias="reauthenticationThreshold")
+    ] = 10000
+    r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""
+
+    sasl: Optional[InputConfluentCloudAuthentication] = None
+    r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+    session_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="sessionTimeout")
+    ] = 30000
+    r"""Timeout used to detect client failures when using Kafka's group-management facilities.
+    If the client sends no heartbeats to the broker before the timeout expires,
+    the broker will remove the client from the group and initiate a rebalance.
+    Value must be between the broker's configured group.min.session.timeout.ms and group.max.session.timeout.ms.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#consumerconfigs_session.timeout.ms) for details.
+    """
+
+    rebalance_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="rebalanceTimeout")
+    ] = 60000
+    r"""Maximum allowed time for each worker to join the group after a rebalance begins.
+    If the timeout is exceeded, the coordinator broker will remove the worker from the group.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#connectconfigs_rebalance.timeout.ms) for details.
+    """
+
+    heartbeat_interval: Annotated[
+        Optional[float], pydantic.Field(alias="heartbeatInterval")
+    ] = 3000
+    r"""Expected time between heartbeats to the consumer coordinator when using Kafka's group-management facilities.
+    Value must be lower than sessionTimeout and typically should not exceed 1/3 of the sessionTimeout value.
+    See [Kafka's documentation](https://kafka.apache.org/documentation/#consumerconfigs_heartbeat.interval.ms) for details.
+    """
+
+    auto_commit_interval: Annotated[
+        Optional[float], pydantic.Field(alias="autoCommitInterval")
+    ] = None
+    r"""How often to commit offsets. If both this and Offset commit threshold are set, @{product} commits offsets when either condition is met. If both are empty, @{product} commits offsets after each batch."""
+
+    auto_commit_threshold: Annotated[
+        Optional[float], pydantic.Field(alias="autoCommitThreshold")
+    ] = None
+    r"""How many events are needed to trigger an offset commit. If both this and Offset commit interval are set, @{product} commits offsets when either condition is met. If both are empty, @{product} commits offsets after each batch."""
+
+    max_bytes_per_partition: Annotated[
+        Optional[float], pydantic.Field(alias="maxBytesPerPartition")
+    ] = 1048576
+    r"""Maximum amount of data that Kafka will return per partition, per fetch request. Must equal or exceed the maximum message size (maxBytesPerPartition) that Kafka is configured to allow. Otherwise, @{product} can get stuck trying to retrieve messages. Defaults to 1048576 (1 MB)."""
+
+    max_bytes: Annotated[Optional[float], pydantic.Field(alias="maxBytes")] = 10485760
+    r"""Maximum number of bytes that Kafka will return per fetch request. Defaults to 10485760 (10 MB)."""
+
+    max_socket_errors: Annotated[
+        Optional[float], pydantic.Field(alias="maxSocketErrors")
+    ] = 0
+    r"""Maximum number of network errors before the consumer re-creates a socket"""
+
+    metadata: Optional[List[InputConfluentCloudMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    description: Optional[str] = None
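
For orientation, here is a minimal usage sketch (not part of the package or the diff) showing how the InputConfluentCloud model above might be constructed and serialized. It assumes the wheel is installed, that these classes are re-exported from cribl_control_plane.models (the 7,305-line models/__init__.py suggests such re-exports), and that the SDK's BaseModel allows population by field name, as Speakeasy-generated pydantic v2 models typically do.

from cribl_control_plane.models import (
    InputConfluentCloud,
    InputConfluentCloudAuthentication,
    InputConfluentCloudSASLMechanism,
    InputConfluentCloudType,
)

# Only type, brokers, and topics are required; every other field falls back
# to the generated defaults shown in the model above.
source = InputConfluentCloud(
    type=InputConfluentCloudType.CONFLUENT_CLOUD,
    brokers=["yourAccount.confluent.cloud:9092"],
    topics=["my-topic"],  # one topic per Source is recommended
    sasl=InputConfluentCloudAuthentication(
        disabled=False,
        mechanism=InputConfluentCloudSASLMechanism.PLAIN,
    ),
)

# Emit the camelCase wire names (groupId, sendToRoutes, and so on) declared in
# the pydantic.Field aliases above; exclude_none drops unset optional fields.
print(source.model_dump(by_alias=True, exclude_none=True))

Serializing with by_alias=True is what maps the snake_case Python attributes back to the camelCase keys the Cribl API expects, matching the alias declarations throughout the model.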