cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputkafka.py

@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -19,38 +18,35 @@ class OutputKafkaType(str, Enum):
 class OutputKafkaAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     r"""Control the number of required acknowledgments."""
 
-    # Leader
     ONE = 1
-    # None
     ZERO = 0
-    # All
     MINUS_1 = -1
 
 
 class OutputKafkaRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use to serialize events before writing to Kafka."""
 
-    # JSON
     JSON = "json"
-    # Field _raw
     RAW = "raw"
-    # Protobuf
     PROTOBUF = "protobuf"
 
 
 class OutputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending to Kafka"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
-    # Snappy
     SNAPPY = "snappy"
-    # LZ4
     LZ4 = "lz4"
 
 
+class OutputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -156,29 +152,13 @@ class OutputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -202,6 +182,14 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Annotated[
+            Optional[OutputKafkaSchemaType], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = OutputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
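
For orientation, a minimal usage sketch of the new schemaType field (illustrative only, not part of the diff; it assumes the generated BaseModel accepts Python field names alongside aliases, as Speakeasy SDKs normally do):

    # Hypothetical example; field names, aliases, and defaults are taken from the hunk above.
    from cribl_control_plane.models.outputkafka import (
        OutputKafkaKafkaSchemaRegistryAuthentication,
        OutputKafkaSchemaType,
    )

    registry = OutputKafkaKafkaSchemaRegistryAuthentication(
        disabled=False,
        schema_registry_url="http://localhost:8081",
        schema_type=OutputKafkaSchemaType.JSON,  # new in 0.3.0a1; default is AVRO
    )

    # by_alias=True should emit the camelCase wire names, e.g. schemaType.
    print(registry.model_dump(by_alias=True, exclude_none=True))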
@@ -231,76 +219,20 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
 
 
-class OutputKafkaAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Enter credentials directly, or select a stored secret"""
-
-    MANUAL = "manual"
-    SECRET = "secret"
-
-
 class OutputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
-    # PLAIN
     PLAIN = "plain"
-    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
-    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
-    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
-class OutputKafkaOauthParamTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class OutputKafkaOauthParam(BaseModel):
-    name: str
-
-    value: str
-
-
-class OutputKafkaSaslExtensionTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class OutputKafkaSaslExtension(BaseModel):
-    name: str
-
-    value: str
-
-
 class OutputKafkaAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
-    username: NotRequired[str]
-    password: NotRequired[str]
-    auth_type: NotRequired[OutputKafkaAuthenticationMethod]
-    r"""Enter credentials directly, or select a stored secret"""
-    credentials_secret: NotRequired[str]
-    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[OutputKafkaSASLMechanism]
-    keytab_location: NotRequired[str]
-    r"""Location of keytab file for authentication principal"""
-    principal: NotRequired[str]
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-    broker_service_class: NotRequired[str]
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
     oauth_enabled: NotRequired[bool]
     r"""Enable OAuth authentication"""
-    token_url: NotRequired[str]
-    r"""URL of the token endpoint to use for OAuth authentication"""
-    client_id: NotRequired[str]
-    r"""Client ID to use for OAuth authentication"""
-    oauth_secret_type: NotRequired[str]
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
-    oauth_params: NotRequired[List[OutputKafkaOauthParamTypedDict]]
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-    sasl_extensions: NotRequired[List[OutputKafkaSaslExtensionTypedDict]]
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class OutputKafkaAuthentication(BaseModel):
@@ -308,89 +240,15 @@ class OutputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    username: Optional[str] = None
-
-    password: Optional[str] = None
-
-    auth_type: Annotated[
-        Annotated[
-            Optional[OutputKafkaAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
-    ] = OutputKafkaAuthenticationMethod.MANUAL
-    r"""Enter credentials directly, or select a stored secret"""
-
-    credentials_secret: Annotated[
-        Optional[str], pydantic.Field(alias="credentialsSecret")
-    ] = None
-    r"""Select or create a secret that references your credentials"""
-
     mechanism: Annotated[
         Optional[OutputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
     ] = OutputKafkaSASLMechanism.PLAIN
 
-    keytab_location: Annotated[
-        Optional[str], pydantic.Field(alias="keytabLocation")
-    ] = None
-    r"""Location of keytab file for authentication principal"""
-
-    principal: Optional[str] = None
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-
-    broker_service_class: Annotated[
-        Optional[str], pydantic.Field(alias="brokerServiceClass")
-    ] = None
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
-
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
     )
     r"""Enable OAuth authentication"""
 
-    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
-    r"""URL of the token endpoint to use for OAuth authentication"""
-
-    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
-    r"""Client ID to use for OAuth authentication"""
-
-    oauth_secret_type: Annotated[
-        Optional[str], pydantic.Field(alias="oauthSecretType")
-    ] = "secret"
-
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
-    ] = None
-    r"""Select or create a stored text secret"""
-
-    oauth_params: Annotated[
-        Optional[List[OutputKafkaOauthParam]], pydantic.Field(alias="oauthParams")
-    ] = None
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-
-    sasl_extensions: Annotated[
-        Optional[List[OutputKafkaSaslExtension]], pydantic.Field(alias="saslExtensions")
-    ] = None
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("mechanism")
-    def serialize_mechanism(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaSASLMechanism(value)
-            except ValueError:
-                return value
-        return value
-
 
 class OutputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
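
The SASL block that remains in 0.3.0a1 is considerably smaller: the username/password, stored-secret, Kerberos, and OAuth detail fields are gone, leaving disabled, mechanism, and oauth_enabled. A hedged sketch of constructing it, under the same field-name assumption as above (illustrative only):

    # Hypothetical example; only the fields still present in the hunk above are set.
    from cribl_control_plane.models.outputkafka import (
        OutputKafkaAuthentication,
        OutputKafkaSASLMechanism,
    )

    sasl = OutputKafkaAuthentication(
        disabled=False,
        mechanism=OutputKafkaSASLMechanism.SCRAM_SHA_256,
        oauth_enabled=False,
    )

    # by_alias=True should use the camelCase wire names (oauthEnabled); the exact
    # representation of the mechanism value depends on the SDK's open-enum handling.
    print(sasl.model_dump(mode="json", by_alias=True))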
@@ -474,65 +332,37 @@ class OutputKafkaTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class OutputKafkaBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
-    # Block
     BLOCK = "block"
-    # Drop
     DROP = "drop"
-    # Persistent Queue
     QUEUE = "queue"
 
 
-class OutputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    ALWAYS = "always"
-    # Always On
-    BACKPRESSURE = "backpressure"
-
-
 class OutputKafkaPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
 class OutputKafkaQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    # Block
     BLOCK = "block"
-    # Drop new data
     DROP = "drop"
 
 
+class OutputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    ERROR = "error"
+    BACKPRESSURE = "backpressure"
+    ALWAYS = "always"
+
+
 class OutputKafkaPqControlsTypedDict(TypedDict):
     pass
 
@@ -596,18 +426,6 @@ class OutputKafkaTypedDict(TypedDict):
     description: NotRequired[str]
     protobuf_library_id: NotRequired[str]
     r"""Select a set of Protobuf definitions for the events you want to send"""
-    protobuf_encoding_id: NotRequired[str]
-    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
-    pq_strict_ordering: NotRequired[bool]
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-    pq_rate_per_sec: NotRequired[float]
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-    pq_mode: NotRequired[OutputKafkaMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-    pq_max_buffer_size: NotRequired[float]
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-    pq_max_backpressure_sec: NotRequired[float]
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -618,6 +436,8 @@ class OutputKafkaTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputKafkaQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+    pq_mode: NotRequired[OutputKafkaMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputKafkaPqControlsTypedDict]
 
 
@@ -741,37 +561,6 @@ class OutputKafka(BaseModel):
     ] = None
     r"""Select a set of Protobuf definitions for the events you want to send"""
 
-    protobuf_encoding_id: Annotated[
-        Optional[str], pydantic.Field(alias="protobufEncodingId")
-    ] = None
-    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
-
-    pq_strict_ordering: Annotated[
-        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
-    ] = True
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-
-    pq_rate_per_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqRatePerSec")
-    ] = 0
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-
-    pq_mode: Annotated[
-        Annotated[Optional[OutputKafkaMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputKafkaMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    pq_max_buffer_size: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
-    ] = 42
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-
-    pq_max_backpressure_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
-    ] = 30
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
-
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -803,69 +592,12 @@ class OutputKafka(BaseModel):
     ] = OutputKafkaQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    pq_mode: Annotated[
+        Annotated[Optional[OutputKafkaMode], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputKafkaMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
     pq_controls: Annotated[
         Optional[OutputKafkaPqControls], pydantic.Field(alias="pqControls")
     ] = None
-
-    @field_serializer("ack")
-    def serialize_ack(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaAcknowledgments(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("format_")
-    def serialize_format_(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaRecordDataFormat(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compression")
-    def serialize_compression(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_backpressure")
-    def serialize_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaBackpressureBehavior(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_mode")
-    def serialize_pq_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_compress")
-    def serialize_pq_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaPqCompressCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_on_backpressure")
-    def serialize_pq_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputKafkaQueueFullBehavior(value)
-            except ValueError:
-                return value
-        return value
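
Across this file, the removed @field_serializer hooks (which coerced raw strings back into their enum classes at dump time, falling back to the raw value) are gone, leaving only the PlainValidator(validate_open_enum(...)) wiring on each enum-typed field. A standalone sketch of that pattern using the same imports as the generated module; PqModeProbe is a made-up model, and the pass-through behavior for unknown values is the apparent intent of the open-enum helpers rather than something this diff states:

    # Illustrative only; mirrors how the generated code annotates pq_mode above.
    from typing import Optional

    import pydantic
    from pydantic.functional_validators import PlainValidator
    from typing_extensions import Annotated

    from cribl_control_plane.types import BaseModel
    from cribl_control_plane.utils import validate_open_enum
    from cribl_control_plane.models.outputkafka import OutputKafkaMode

    class PqModeProbe(BaseModel):
        pq_mode: Annotated[
            Annotated[Optional[OutputKafkaMode], PlainValidator(validate_open_enum(False))],
            pydantic.Field(alias="pqMode"),
        ] = OutputKafkaMode.ERROR

    # A declared value resolves against the enum; a value outside the declared
    # members is expected to pass validation rather than raise, which is what
    # the OpenEnumMeta/validate_open_enum pair appears to provide.
    print(PqModeProbe.model_validate({"pqMode": "backpressure"}).pq_mode)
    print(PqModeProbe.model_validate({"pqMode": "some-future-mode"}).pq_mode)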