cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff shows the changes between publicly available versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputconfluentcloud.py

@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -98,60 +97,39 @@ class OutputConfluentCloudTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class OutputConfluentCloudAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     r"""Control the number of required acknowledgments."""
 
-    # Leader
     ONE = 1
-    # None
     ZERO = 0
-    # All
     MINUS_1 = -1
 
 
 class OutputConfluentCloudRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use to serialize events before writing to Kafka."""
 
-    # JSON
     JSON = "json"
-    # Field _raw
     RAW = "raw"
-    # Protobuf
     PROTOBUF = "protobuf"
 
 
 class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending to Kafka"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
-    # Snappy
     SNAPPY = "snappy"
-    # LZ4
     LZ4 = "lz4"
 
 
+class OutputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -257,33 +235,13 @@ class OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
-                    value
-                )
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
-                    value
-                )
-            except ValueError:
-                return value
-        return value
-
 
 class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -309,6 +267,15 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Annotated[
+            Optional[OutputConfluentCloudSchemaType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = OutputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
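
For orientation, here is a minimal, hypothetical sketch of how the new schemaType setting might be populated against the 0.3.0a1 models. Only OutputConfluentCloudKafkaSchemaRegistryAuthentication and OutputConfluentCloudSchemaType are taken from the diff above; the assumption that the generated BaseModel accepts snake_case field names (rather than only the camelCase aliases) follows the usual Speakeasy convention and is not verified against this release.

from cribl_control_plane.models.outputconfluentcloud import (
    OutputConfluentCloudKafkaSchemaRegistryAuthentication,
    OutputConfluentCloudSchemaType,
)

# Sketch only: schema_type is new in 0.3.0a1 and defaults to AVRO when omitted.
registry_auth = OutputConfluentCloudKafkaSchemaRegistryAuthentication(
    disabled=False,
    schema_registry_url="https://schema-registry.example.com:8081",  # hypothetical URL
    schema_type=OutputConfluentCloudSchemaType.JSON,
    connection_timeout=30000,
)

# The wire name is "schemaType", per the pydantic.Field alias shown above.
print(registry_auth.model_dump(by_alias=True, exclude_none=True))
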
@@ -338,76 +305,20 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
 
 
-class OutputConfluentCloudAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Enter credentials directly, or select a stored secret"""
-
-    MANUAL = "manual"
-    SECRET = "secret"
-
-
 class OutputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
-    # PLAIN
     PLAIN = "plain"
-    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
-    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
-    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
-class OutputConfluentCloudOauthParamTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class OutputConfluentCloudOauthParam(BaseModel):
-    name: str
-
-    value: str
-
-
-class OutputConfluentCloudSaslExtensionTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class OutputConfluentCloudSaslExtension(BaseModel):
-    name: str
-
-    value: str
-
-
 class OutputConfluentCloudAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
-    username: NotRequired[str]
-    password: NotRequired[str]
-    auth_type: NotRequired[OutputConfluentCloudAuthenticationMethod]
-    r"""Enter credentials directly, or select a stored secret"""
-    credentials_secret: NotRequired[str]
-    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[OutputConfluentCloudSASLMechanism]
-    keytab_location: NotRequired[str]
-    r"""Location of keytab file for authentication principal"""
-    principal: NotRequired[str]
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-    broker_service_class: NotRequired[str]
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
     oauth_enabled: NotRequired[bool]
     r"""Enable OAuth authentication"""
-    token_url: NotRequired[str]
-    r"""URL of the token endpoint to use for OAuth authentication"""
-    client_id: NotRequired[str]
-    r"""Client ID to use for OAuth authentication"""
-    oauth_secret_type: NotRequired[str]
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
-    oauth_params: NotRequired[List[OutputConfluentCloudOauthParamTypedDict]]
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-    sasl_extensions: NotRequired[List[OutputConfluentCloudSaslExtensionTypedDict]]
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class OutputConfluentCloudAuthentication(BaseModel):
@@ -415,135 +326,49 @@ class OutputConfluentCloudAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    username: Optional[str] = None
-
-    password: Optional[str] = None
-
-    auth_type: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
-    ] = OutputConfluentCloudAuthenticationMethod.MANUAL
-    r"""Enter credentials directly, or select a stored secret"""
-
-    credentials_secret: Annotated[
-        Optional[str], pydantic.Field(alias="credentialsSecret")
-    ] = None
-    r"""Select or create a secret that references your credentials"""
-
     mechanism: Annotated[
         Optional[OutputConfluentCloudSASLMechanism],
         PlainValidator(validate_open_enum(False)),
     ] = OutputConfluentCloudSASLMechanism.PLAIN
 
-    keytab_location: Annotated[
-        Optional[str], pydantic.Field(alias="keytabLocation")
-    ] = None
-    r"""Location of keytab file for authentication principal"""
-
-    principal: Optional[str] = None
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-
-    broker_service_class: Annotated[
-        Optional[str], pydantic.Field(alias="brokerServiceClass")
-    ] = None
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
-
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
     )
     r"""Enable OAuth authentication"""
 
-    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
-    r"""URL of the token endpoint to use for OAuth authentication"""
-
-    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
-    r"""Client ID to use for OAuth authentication"""
-
-    oauth_secret_type: Annotated[
-        Optional[str], pydantic.Field(alias="oauthSecretType")
-    ] = "secret"
-
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
-    ] = None
-    r"""Select or create a stored text secret"""
-
-    oauth_params: Annotated[
-        Optional[List[OutputConfluentCloudOauthParam]],
-        pydantic.Field(alias="oauthParams"),
-    ] = None
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-
-    sasl_extensions: Annotated[
-        Optional[List[OutputConfluentCloudSaslExtension]],
-        pydantic.Field(alias="saslExtensions"),
-    ] = None
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("mechanism")
-    def serialize_mechanism(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudSASLMechanism(value)
-            except ValueError:
-                return value
-        return value
-
 
 class OutputConfluentCloudBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
-    # Block
     BLOCK = "block"
-    # Drop
     DROP = "drop"
-    # Persistent Queue
     QUEUE = "queue"
 
 
-class OutputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    ALWAYS = "always"
-    # Always On
-    BACKPRESSURE = "backpressure"
-
-
 class OutputConfluentCloudPqCompressCompression(
     str, Enum, metaclass=utils.OpenEnumMeta
 ):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
 class OutputConfluentCloudQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    # Block
     BLOCK = "block"
-    # Drop new data
     DROP = "drop"
 
 
+class OutputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    ERROR = "error"
+    BACKPRESSURE = "backpressure"
+    ALWAYS = "always"
+
+
 class OutputConfluentCloudPqControlsTypedDict(TypedDict):
     pass
 
@@ -607,18 +432,6 @@ class OutputConfluentCloudTypedDict(TypedDict):
     description: NotRequired[str]
     protobuf_library_id: NotRequired[str]
     r"""Select a set of Protobuf definitions for the events you want to send"""
-    protobuf_encoding_id: NotRequired[str]
-    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
-    pq_strict_ordering: NotRequired[bool]
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-    pq_rate_per_sec: NotRequired[float]
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-    pq_mode: NotRequired[OutputConfluentCloudMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-    pq_max_buffer_size: NotRequired[float]
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-    pq_max_backpressure_sec: NotRequired[float]
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -629,6 +442,8 @@ class OutputConfluentCloudTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputConfluentCloudQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+    pq_mode: NotRequired[OutputConfluentCloudMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputConfluentCloudPqControlsTypedDict]
 
 
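Taken together, the two TypedDict hunks above remove protobuf_encoding_id, pq_strict_ordering, pq_rate_per_sec, pq_max_buffer_size, and pq_max_backpressure_sec, and reposition pq_mode after pq_on_backpressure. A hedged sketch of the persistent-queue keys that remain follows; the values are illustrative rather than documented defaults, and the required connection settings of OutputConfluentCloudTypedDict are omitted because they are not shown in this diff.

from cribl_control_plane.models.outputconfluentcloud import (
    OutputConfluentCloudMode,
    OutputConfluentCloudPqCompressCompression,
    OutputConfluentCloudQueueFullBehavior,
)

# Illustrative values only; key names come from the TypedDict hunks above.
pq_settings = {
    "pq_max_file_size": "1 MB",
    "pq_max_size": "5GB",
    "pq_compress": OutputConfluentCloudPqCompressCompression.GZIP,
    "pq_on_backpressure": OutputConfluentCloudQueueFullBehavior.BLOCK,
    "pq_mode": OutputConfluentCloudMode.ERROR,  # engage PQ only when the Destination is unavailable
}
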
@@ -754,40 +569,6 @@ class OutputConfluentCloud(BaseModel):
     ] = None
     r"""Select a set of Protobuf definitions for the events you want to send"""
 
-    protobuf_encoding_id: Annotated[
-        Optional[str], pydantic.Field(alias="protobufEncodingId")
-    ] = None
-    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
-
-    pq_strict_ordering: Annotated[
-        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
-    ] = True
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-
-    pq_rate_per_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqRatePerSec")
-    ] = 0
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-
-    pq_mode: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudMode],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputConfluentCloudMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    pq_max_buffer_size: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
-    ] = 42
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-
-    pq_max_backpressure_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
-    ] = 30
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
-
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -819,69 +600,15 @@ class OutputConfluentCloud(BaseModel):
     ] = OutputConfluentCloudQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    pq_mode: Annotated[
+        Annotated[
+            Optional[OutputConfluentCloudMode],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputConfluentCloudMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
     pq_controls: Annotated[
         Optional[OutputConfluentCloudPqControls], pydantic.Field(alias="pqControls")
     ] = None
-
-    @field_serializer("ack")
-    def serialize_ack(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudAcknowledgments(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("format_")
-    def serialize_format_(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudRecordDataFormat(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compression")
-    def serialize_compression(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_backpressure")
-    def serialize_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudBackpressureBehavior(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_mode")
-    def serialize_pq_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_compress")
-    def serialize_pq_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudPqCompressCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_on_backpressure")
-    def serialize_pq_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputConfluentCloudQueueFullBehavior(value)
-            except ValueError:
-                return value
-        return value
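
The fields that lose these serializers are still declared with PlainValidator(validate_open_enum(False)), the SDK's open-enum pattern. A small, hedged sketch of what that pattern implies for callers, assuming validate_open_enum behaves like Speakeasy's usual open-enum helper (unknown values pass validation instead of raising); this is an assumption, not something confirmed by this diff:

from cribl_control_plane.models.outputconfluentcloud import OutputConfluentCloudMode

# Known values resolve to real enum members.
mode = OutputConfluentCloudMode("error")
assert mode is OutputConfluentCloudMode.ERROR

# Because these are open enums, a config written by a newer Cribl release may
# carry a value this SDK does not list, so defensive code should not assume
# membership. The pass-through behavior comes from utils.OpenEnumMeta and
# validate_open_enum(False) and is assumed, not verified, here.
def describe_pq_mode(value) -> str:
    if isinstance(value, OutputConfluentCloudMode):
        return f"known mode: {value.value}"
    return f"unrecognized mode: {value!r}"
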