cribl-control-plane 0.0.13 (py3-none-any.whl)

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/outputconfluentcloud.py
@@ -0,0 +1,591 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputConfluentCloudType(str, Enum, metaclass=utils.OpenEnumMeta):
+     CONFLUENT_CLOUD = "confluent_cloud"
+
+
+ class OutputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class OutputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class OutputConfluentCloudTLSSettingsClientSideTypedDict(TypedDict):
+     disabled: NotRequired[bool]
+     reject_unauthorized: NotRequired[bool]
+     r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+     trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+     """
+     servername: NotRequired[str]
+     r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+     certificate_name: NotRequired[str]
+     r"""The name of the predefined certificate"""
+     ca_path: NotRequired[str]
+     r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+     priv_key_path: NotRequired[str]
+     r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+     cert_path: NotRequired[str]
+     r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+     passphrase: NotRequired[str]
+     r"""Passphrase to use to decrypt private key"""
+     min_version: NotRequired[OutputConfluentCloudMinimumTLSVersion]
+     max_version: NotRequired[OutputConfluentCloudMaximumTLSVersion]
+
+
+ class OutputConfluentCloudTLSSettingsClientSide(BaseModel):
+     disabled: Optional[bool] = False
+
+     reject_unauthorized: Annotated[
+         Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+     ] = True
+     r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+     trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+     """
+
+     servername: Optional[str] = None
+     r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+
+     certificate_name: Annotated[
+         Optional[str], pydantic.Field(alias="certificateName")
+     ] = None
+     r"""The name of the predefined certificate"""
+
+     ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+     r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+
+     priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+     r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+
+     cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+     r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+
+     passphrase: Optional[str] = None
+     r"""Passphrase to use to decrypt private key"""
+
+     min_version: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="minVersion"),
+     ] = None
+
+     max_version: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="maxVersion"),
+     ] = None
+
+
+ class OutputConfluentCloudAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Control the number of required acknowledgments."""
+
+     ONE = 1
+     ZERO = 0
+     MINUS_1 = -1
+
+
+ class OutputConfluentCloudRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Format to use to serialize events before writing to Kafka."""
+
+     JSON = "json"
+     RAW = "raw"
+     PROTOBUF = "protobuf"
+
+
+ class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Codec to use to compress the data before sending to Kafka"""
+
+     NONE = "none"
+     GZIP = "gzip"
+     SNAPPY = "snappy"
+     LZ4 = "lz4"
+
+
+ class OutputConfluentCloudAuthTypedDict(TypedDict):
+     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+     disabled: NotRequired[bool]
+     credentials_secret: NotRequired[str]
+     r"""Select or create a secret that references your credentials"""
+
+
+ class OutputConfluentCloudAuth(BaseModel):
+     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+     disabled: Optional[bool] = True
+
+     credentials_secret: Annotated[
+         Optional[str], pydantic.Field(alias="credentialsSecret")
+     ] = None
+     r"""Select or create a secret that references your credentials"""
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
+     str, Enum, metaclass=utils.OpenEnumMeta
+ ):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
+     str, Enum, metaclass=utils.OpenEnumMeta
+ ):
+     TL_SV1 = "TLSv1"
+     TL_SV1_1 = "TLSv1.1"
+     TL_SV1_2 = "TLSv1.2"
+     TL_SV1_3 = "TLSv1.3"
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSideTypedDict(TypedDict):
+     disabled: NotRequired[bool]
+     reject_unauthorized: NotRequired[bool]
+     r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+     trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+     """
+     servername: NotRequired[str]
+     r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+     certificate_name: NotRequired[str]
+     r"""The name of the predefined certificate"""
+     ca_path: NotRequired[str]
+     r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+     priv_key_path: NotRequired[str]
+     r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+     cert_path: NotRequired[str]
+     r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+     passphrase: NotRequired[str]
+     r"""Passphrase to use to decrypt private key"""
+     min_version: NotRequired[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion]
+     max_version: NotRequired[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion]
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
+     disabled: Optional[bool] = True
+
+     reject_unauthorized: Annotated[
+         Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+     ] = True
+     r"""Reject certificates that are not authorized by a CA in the CA certificate path, or by another
+     trusted CA (such as the system's). Defaults to Enabled. Overrides the toggle from Advanced Settings, when also present.
+     """
+
+     servername: Optional[str] = None
+     r"""Server name for the SNI (Server Name Indication) TLS extension. It must be a host name, and not an IP address."""
+
+     certificate_name: Annotated[
+         Optional[str], pydantic.Field(alias="certificateName")
+     ] = None
+     r"""The name of the predefined certificate"""
+
+     ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+     r"""Path on client in which to find CA certificates to verify the server's cert. PEM format. Can reference $ENV_VARS."""
+
+     priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+     r"""Path on client in which to find the private key to use. PEM format. Can reference $ENV_VARS."""
+
+     cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+     r"""Path on client in which to find certificates to use. PEM format. Can reference $ENV_VARS."""
+
+     passphrase: Optional[str] = None
+     r"""Passphrase to use to decrypt private key"""
+
+     min_version: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="minVersion"),
+     ] = None
+
+     max_version: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="maxVersion"),
+     ] = None
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
+     disabled: NotRequired[bool]
+     schema_registry_url: NotRequired[str]
+     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+     connection_timeout: NotRequired[float]
+     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
+     request_timeout: NotRequired[float]
+     r"""Maximum time to wait for the Schema Registry to respond to a request"""
+     max_retries: NotRequired[float]
+     r"""Maximum number of times to try fetching schemas from the Schema Registry"""
+     auth: NotRequired[OutputConfluentCloudAuthTypedDict]
+     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+     tls: NotRequired[
+         OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSideTypedDict
+     ]
+     default_key_schema_id: NotRequired[float]
+     r"""Used when __keySchemaIdOut is not present, to transform key values, leave blank if key transformation is not required by default."""
+     default_value_schema_id: NotRequired[float]
+     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
+
+
+ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
+     disabled: Optional[bool] = True
+
+     schema_registry_url: Annotated[
+         Optional[str], pydantic.Field(alias="schemaRegistryURL")
+     ] = "http://localhost:8081"
+     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+
+     connection_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="connectionTimeout")
+     ] = 30000
+     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
+
+     request_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="requestTimeout")
+     ] = 30000
+     r"""Maximum time to wait for the Schema Registry to respond to a request"""
+
+     max_retries: Annotated[Optional[float], pydantic.Field(alias="maxRetries")] = 1
+     r"""Maximum number of times to try fetching schemas from the Schema Registry"""
+
+     auth: Optional[OutputConfluentCloudAuth] = None
+     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
+
+     tls: Optional[OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
+
+     default_key_schema_id: Annotated[
+         Optional[float], pydantic.Field(alias="defaultKeySchemaId")
+     ] = None
+     r"""Used when __keySchemaIdOut is not present, to transform key values, leave blank if key transformation is not required by default."""
+
+     default_value_schema_id: Annotated[
+         Optional[float], pydantic.Field(alias="defaultValueSchemaId")
+     ] = None
+     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
+
+
+ class OutputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+     PLAIN = "plain"
+     SCRAM_SHA_256 = "scram-sha-256"
+     SCRAM_SHA_512 = "scram-sha-512"
+     KERBEROS = "kerberos"
+
+
+ class OutputConfluentCloudAuthenticationTypedDict(TypedDict):
+     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+     disabled: NotRequired[bool]
+     mechanism: NotRequired[OutputConfluentCloudSASLMechanism]
+
+
+ class OutputConfluentCloudAuthentication(BaseModel):
+     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+     disabled: Optional[bool] = True
+
+     mechanism: Annotated[
+         Optional[OutputConfluentCloudSASLMechanism],
+         PlainValidator(validate_open_enum(False)),
+     ] = OutputConfluentCloudSASLMechanism.PLAIN
+
+
+ class OutputConfluentCloudBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     BLOCK = "block"
+     DROP = "drop"
+     QUEUE = "queue"
+
+
+ class OutputConfluentCloudPqCompressCompression(
+     str, Enum, metaclass=utils.OpenEnumMeta
+ ):
+     r"""Codec to use to compress the persisted data"""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class OutputConfluentCloudQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     BLOCK = "block"
+     DROP = "drop"
+
+
+ class OutputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     ERROR = "error"
+     BACKPRESSURE = "backpressure"
+     ALWAYS = "always"
+
+
+ class OutputConfluentCloudPqControlsTypedDict(TypedDict):
+     pass
+
+
+ class OutputConfluentCloudPqControls(BaseModel):
+     pass
+
+
+ class OutputConfluentCloudTypedDict(TypedDict):
+     brokers: List[str]
+     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092."""
+     topic: str
+     r"""The topic to publish events to. Can be overridden using the __topicOut field."""
+     id: NotRequired[str]
+     r"""Unique ID for this output"""
+     type: NotRequired[OutputConfluentCloudType]
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     tls: NotRequired[OutputConfluentCloudTLSSettingsClientSideTypedDict]
+     ack: NotRequired[OutputConfluentCloudAcknowledgments]
+     r"""Control the number of required acknowledgments."""
+     format_: NotRequired[OutputConfluentCloudRecordDataFormat]
+     r"""Format to use to serialize events before writing to Kafka."""
+     compression: NotRequired[OutputConfluentCloudCompression]
+     r"""Codec to use to compress the data before sending to Kafka"""
+     max_record_size_kb: NotRequired[float]
+     r"""Maximum size of each record batch before compression. The value must not exceed the Kafka brokers' message.max.bytes setting."""
+     flush_event_count: NotRequired[float]
+     r"""The maximum number of events you want the Destination to allow in a batch before forcing a flush"""
+     flush_period_sec: NotRequired[float]
+     r"""The maximum amount of time you want the Destination to wait before forcing a flush. Shorter intervals tend to result in smaller batches being sent."""
+     kafka_schema_registry: NotRequired[
+         OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict
+     ]
+     connection_timeout: NotRequired[float]
+     r"""Maximum time to wait for a connection to complete successfully"""
+     request_timeout: NotRequired[float]
+     r"""Maximum time to wait for Kafka to respond to a request"""
+     max_retries: NotRequired[float]
+     r"""If messages are failing, you can set the maximum number of retries as high as 100 to prevent loss of data"""
+     max_back_off: NotRequired[float]
+     r"""The maximum wait time for a retry, in milliseconds. Default (and minimum) is 30,000 ms (30 seconds); maximum is 180,000 ms (180 seconds)."""
+     initial_backoff: NotRequired[float]
+     r"""Initial value used to calculate the retry, in milliseconds. Maximum is 600,000 ms (10 minutes)."""
+     backoff_rate: NotRequired[float]
+     r"""Set the backoff multiplier (2-20) to control the retry frequency for failed messages. For faster retries, use a lower multiplier. For slower retries with more delay between attempts, use a higher multiplier. The multiplier is used in an exponential backoff formula; see the Kafka [documentation](https://kafka.js.org/docs/retry-detailed) for details."""
+     authentication_timeout: NotRequired[float]
+     r"""Maximum time to wait for Kafka to respond to an authentication request"""
+     reauthentication_threshold: NotRequired[float]
+     r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""
+     sasl: NotRequired[OutputConfluentCloudAuthenticationTypedDict]
+     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+     on_backpressure: NotRequired[OutputConfluentCloudBackpressureBehavior]
+     r"""How to handle events when all receivers are exerting backpressure"""
+     description: NotRequired[str]
+     protobuf_library_id: NotRequired[str]
+     r"""Select a set of Protobuf definitions for the events you want to send"""
+     pq_max_file_size: NotRequired[str]
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+     pq_max_size: NotRequired[str]
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+     pq_path: NotRequired[str]
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+     pq_compress: NotRequired[OutputConfluentCloudPqCompressCompression]
+     r"""Codec to use to compress the persisted data"""
+     pq_on_backpressure: NotRequired[OutputConfluentCloudQueueFullBehavior]
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+     pq_mode: NotRequired[OutputConfluentCloudMode]
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+     pq_controls: NotRequired[OutputConfluentCloudPqControlsTypedDict]
+
+
+ class OutputConfluentCloud(BaseModel):
+     brokers: List[str]
+     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092."""
+
+     topic: str
+     r"""The topic to publish events to. Can be overridden using the __topicOut field."""
+
+     id: Optional[str] = None
+     r"""Unique ID for this output"""
+
+     type: Annotated[
+         Optional[OutputConfluentCloudType], PlainValidator(validate_open_enum(False))
+     ] = None
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     tls: Optional[OutputConfluentCloudTLSSettingsClientSide] = None
+
+     ack: Annotated[
+         Optional[OutputConfluentCloudAcknowledgments],
+         PlainValidator(validate_open_enum(True)),
+     ] = OutputConfluentCloudAcknowledgments.ONE
+     r"""Control the number of required acknowledgments."""
+
+     format_: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudRecordDataFormat],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="format"),
+     ] = OutputConfluentCloudRecordDataFormat.JSON
+     r"""Format to use to serialize events before writing to Kafka."""
+
+     compression: Annotated[
+         Optional[OutputConfluentCloudCompression],
+         PlainValidator(validate_open_enum(False)),
+     ] = OutputConfluentCloudCompression.GZIP
+     r"""Codec to use to compress the data before sending to Kafka"""
+
+     max_record_size_kb: Annotated[
+         Optional[float], pydantic.Field(alias="maxRecordSizeKB")
+     ] = 768
+     r"""Maximum size of each record batch before compression. The value must not exceed the Kafka brokers' message.max.bytes setting."""
+
+     flush_event_count: Annotated[
+         Optional[float], pydantic.Field(alias="flushEventCount")
+     ] = 1000
+     r"""The maximum number of events you want the Destination to allow in a batch before forcing a flush"""
+
+     flush_period_sec: Annotated[
+         Optional[float], pydantic.Field(alias="flushPeriodSec")
+     ] = 1
+     r"""The maximum amount of time you want the Destination to wait before forcing a flush. Shorter intervals tend to result in smaller batches being sent."""
+
+     kafka_schema_registry: Annotated[
+         Optional[OutputConfluentCloudKafkaSchemaRegistryAuthentication],
+         pydantic.Field(alias="kafkaSchemaRegistry"),
+     ] = None
+
+     connection_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="connectionTimeout")
+     ] = 10000
+     r"""Maximum time to wait for a connection to complete successfully"""
+
+     request_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="requestTimeout")
+     ] = 60000
+     r"""Maximum time to wait for Kafka to respond to a request"""
+
+     max_retries: Annotated[Optional[float], pydantic.Field(alias="maxRetries")] = 5
+     r"""If messages are failing, you can set the maximum number of retries as high as 100 to prevent loss of data"""
+
+     max_back_off: Annotated[Optional[float], pydantic.Field(alias="maxBackOff")] = 30000
+     r"""The maximum wait time for a retry, in milliseconds. Default (and minimum) is 30,000 ms (30 seconds); maximum is 180,000 ms (180 seconds)."""
+
+     initial_backoff: Annotated[
+         Optional[float], pydantic.Field(alias="initialBackoff")
+     ] = 300
+     r"""Initial value used to calculate the retry, in milliseconds. Maximum is 600,000 ms (10 minutes)."""
+
+     backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+     r"""Set the backoff multiplier (2-20) to control the retry frequency for failed messages. For faster retries, use a lower multiplier. For slower retries with more delay between attempts, use a higher multiplier. The multiplier is used in an exponential backoff formula; see the Kafka [documentation](https://kafka.js.org/docs/retry-detailed) for details."""
+
+     authentication_timeout: Annotated[
+         Optional[float], pydantic.Field(alias="authenticationTimeout")
+     ] = 10000
+     r"""Maximum time to wait for Kafka to respond to an authentication request"""
+
+     reauthentication_threshold: Annotated[
+         Optional[float], pydantic.Field(alias="reauthenticationThreshold")
+     ] = 10000
+     r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""
+
+     sasl: Optional[OutputConfluentCloudAuthentication] = None
+     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
+
+     on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudBackpressureBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="onBackpressure"),
+     ] = OutputConfluentCloudBackpressureBehavior.BLOCK
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     description: Optional[str] = None
+
+     protobuf_library_id: Annotated[
+         Optional[str], pydantic.Field(alias="protobufLibraryId")
+     ] = None
+     r"""Select a set of Protobuf definitions for the events you want to send"""
+
+     pq_max_file_size: Annotated[
+         Optional[str], pydantic.Field(alias="pqMaxFileSize")
+     ] = "1 MB"
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+
+     pq_max_size: Annotated[Optional[str], pydantic.Field(alias="pqMaxSize")] = "5GB"
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+     pq_path: Annotated[Optional[str], pydantic.Field(alias="pqPath")] = (
+         "$CRIBL_HOME/state/queues"
+     )
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+
+     pq_compress: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudPqCompressCompression],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqCompress"),
+     ] = OutputConfluentCloudPqCompressCompression.NONE
+     r"""Codec to use to compress the persisted data"""
+
+     pq_on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudQueueFullBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqOnBackpressure"),
+     ] = OutputConfluentCloudQueueFullBehavior.BLOCK
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     pq_mode: Annotated[
+         Annotated[
+             Optional[OutputConfluentCloudMode],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqMode"),
+     ] = OutputConfluentCloudMode.ERROR
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     pq_controls: Annotated[
+         Optional[OutputConfluentCloudPqControls], pydantic.Field(alias="pqControls")
+     ] = None
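
For orientation, a minimal usage sketch of the model this hunk defines. It assumes the hunk is cribl_control_plane/models/outputconfluentcloud.py (the only +591 -0 file in the list above) and that the SDK's BaseModel is Pydantic v2-based, so model_dump is available. Field names, aliases, and defaults come from the generated code; everything else is illustrative, and the SDK call that would actually create the Destination is omitted.

    # Illustrative only, not part of the package: build a Confluent Cloud
    # Destination config and serialize it with its camelCase wire aliases.
    from cribl_control_plane.models.outputconfluentcloud import (
        OutputConfluentCloud,
        OutputConfluentCloudAuthentication,
        OutputConfluentCloudSASLMechanism,
        OutputConfluentCloudTLSSettingsClientSide,
    )

    output = OutputConfluentCloud(
        id="confluent-cloud-out",                      # optional field, no alias
        brokers=["yourAccount.confluent.cloud:9092"],  # required
        topic="cribl-events",                          # required
        tls=OutputConfluentCloudTLSSettingsClientSide(disabled=False),
        sasl=OutputConfluentCloudAuthentication(
            disabled=False,
            mechanism=OutputConfluentCloudSASLMechanism.PLAIN,
        ),
    )

    # Assuming a Pydantic v2 BaseModel: dump with the API field names
    # (rejectUnauthorized, flushPeriodSec, onBackpressure, ...) for the REST payload.
    payload = output.model_dump(by_alias=True, exclude_none=True)
    print(payload["topic"], payload["compression"])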