cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.

Potentially problematic release. This version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
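
The hunks below reproduce the generated-model diffs for inputkafka.py, inputkinesis.py, inputkubeevents.py, inputkubelogs.py, and inputkubemetrics.py; the other input and output models in the list above change along largely the same lines, dropping their per-field field_serializer hooks. To inspect the pre-release locally, pinning the exact version is enough for pip to accept a pre-release (a routine command, not part of this diff):

pip install cribl-control-plane==0.3.0a1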
cribl_control_plane/models/inputkafka.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputKafkaConnection(BaseModel):
 class InputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,23 +102,12 @@ class InputKafkaPq(BaseModel):
         Optional[InputKafkaPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaCompression(value)
-            except ValueError:
-                return value
-        return value
+
+class InputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
 
 
 class InputKafkaAuthTypedDict(TypedDict):
@@ -231,29 +215,13 @@ class InputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaKafkaSchemaRegistryMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaKafkaSchemaRegistryMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -273,6 +241,14 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Annotated[
+            Optional[InputKafkaSchemaType], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = InputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -292,76 +268,20 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputKafkaKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputKafkaAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Enter credentials directly, or select a stored secret"""
-
-    MANUAL = "manual"
-    SECRET = "secret"
-
-
 class InputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
-    # PLAIN
     PLAIN = "plain"
-    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
-    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
-    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
-class InputKafkaOauthParamTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class InputKafkaOauthParam(BaseModel):
-    name: str
-
-    value: str
-
-
-class InputKafkaSaslExtensionTypedDict(TypedDict):
-    name: str
-    value: str
-
-
-class InputKafkaSaslExtension(BaseModel):
-    name: str
-
-    value: str
-
-
 class InputKafkaAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
-    username: NotRequired[str]
-    password: NotRequired[str]
-    auth_type: NotRequired[InputKafkaAuthenticationMethod]
-    r"""Enter credentials directly, or select a stored secret"""
-    credentials_secret: NotRequired[str]
-    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[InputKafkaSASLMechanism]
-    keytab_location: NotRequired[str]
-    r"""Location of keytab file for authentication principal"""
-    principal: NotRequired[str]
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-    broker_service_class: NotRequired[str]
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
    oauth_enabled: NotRequired[bool]
    r"""Enable OAuth authentication"""
-    token_url: NotRequired[str]
-    r"""URL of the token endpoint to use for OAuth authentication"""
-    client_id: NotRequired[str]
-    r"""Client ID to use for OAuth authentication"""
-    oauth_secret_type: NotRequired[str]
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
-    oauth_params: NotRequired[List[InputKafkaOauthParamTypedDict]]
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-    sasl_extensions: NotRequired[List[InputKafkaSaslExtensionTypedDict]]
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class InputKafkaAuthentication(BaseModel):
@@ -369,89 +289,15 @@ class InputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    username: Optional[str] = None
-
-    password: Optional[str] = None
-
-    auth_type: Annotated[
-        Annotated[
-            Optional[InputKafkaAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
-    ] = InputKafkaAuthenticationMethod.MANUAL
-    r"""Enter credentials directly, or select a stored secret"""
-
-    credentials_secret: Annotated[
-        Optional[str], pydantic.Field(alias="credentialsSecret")
-    ] = None
-    r"""Select or create a secret that references your credentials"""
-
     mechanism: Annotated[
         Optional[InputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
     ] = InputKafkaSASLMechanism.PLAIN
 
-    keytab_location: Annotated[
-        Optional[str], pydantic.Field(alias="keytabLocation")
-    ] = None
-    r"""Location of keytab file for authentication principal"""
-
-    principal: Optional[str] = None
-    r"""Authentication principal, such as `kafka_user@example.com`"""
-
-    broker_service_class: Annotated[
-        Optional[str], pydantic.Field(alias="brokerServiceClass")
-    ] = None
-    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
-
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
     )
     r"""Enable OAuth authentication"""
 
-    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
-    r"""URL of the token endpoint to use for OAuth authentication"""
-
-    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
-    r"""Client ID to use for OAuth authentication"""
-
-    oauth_secret_type: Annotated[
-        Optional[str], pydantic.Field(alias="oauthSecretType")
-    ] = "secret"
-
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
-    ] = None
-    r"""Select or create a stored text secret"""
-
-    oauth_params: Annotated[
-        Optional[List[InputKafkaOauthParam]], pydantic.Field(alias="oauthParams")
-    ] = None
-    r"""Additional fields to send to the token endpoint, such as scope or audience"""
-
-    sasl_extensions: Annotated[
-        Optional[List[InputKafkaSaslExtension]], pydantic.Field(alias="saslExtensions")
-    ] = None
-    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("mechanism")
-    def serialize_mechanism(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaSASLMechanism(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
@@ -535,24 +381,6 @@ class InputKafkaTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKafkaMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKafkaMetadatumTypedDict(TypedDict):
     name: str
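
Net effect for inputkafka.py: the dump-time field_serializer hooks are removed, the manual/Kerberos/OAuth credential fields leave InputKafkaAuthentication, and a new InputKafkaSchemaType open enum ("avro" or "json", default avro) is wired into the Schema Registry authentication model. A minimal sketch of the new field, assuming these classes are re-exported from cribl_control_plane.models as in other Speakeasy-generated modules (the import path is not shown in this diff):

from cribl_control_plane.models import (
    InputKafkaKafkaSchemaRegistryAuthentication,
    InputKafkaSchemaType,
)

# Validate using the camelCase wire alias from the diff.
auth = InputKafkaKafkaSchemaRegistryAuthentication.model_validate({"schemaType": "json"})
print(auth.schema_type)          # InputKafkaSchemaType.JSON
print(auth.schema_registry_url)  # http://localhost:8081 (the default, per the diff)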
cribl_control_plane/models/inputkinesis.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputKinesisConnection(BaseModel):
 class InputKinesisMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputKinesisCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,64 +102,35 @@ class InputKinesisPq(BaseModel):
         Optional[InputKinesisPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKinesisMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKinesisCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class ShardIteratorStart(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Location at which to start reading a shard for the first time"""
 
-    # Earliest record
     TRIM_HORIZON = "TRIM_HORIZON"
-    # Latest record
     LATEST = "LATEST"
 
 
 class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
-    # Cribl
     CRIBL = "cribl"
-    # Newline JSON
     NDJSON = "ndjson"
-    # Cloudwatch Logs
     CLOUDWATCH = "cloudwatch"
-    # Event per line
     LINE = "line"
 
 
 class ShardLoadBalancing(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
-    # Consistent Hashing
     CONSISTENT_HASHING = "ConsistentHashing"
-    # Round Robin
     ROUND_ROBIN = "RoundRobin"
 
 
 class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
-    # Auto
     AUTO = "auto"
-    # Manual
     MANUAL = "manual"
-    # Secret Key pair
     SECRET = "secret"
 
 
@@ -408,48 +374,3 @@ class InputKinesis(BaseModel):
 
     aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
     r"""Select or create a stored secret that references your access key and secret key"""
-
-    @field_serializer("shard_iterator_type")
-    def serialize_shard_iterator_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.ShardIteratorStart(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("payload_format")
-    def serialize_payload_format(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKinesisRecordDataFormat(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("load_balancing_algorithm")
-    def serialize_load_balancing_algorithm(self, value):
-        if isinstance(value, str):
-            try:
-                return models.ShardLoadBalancing(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("aws_authentication_method")
-    def serialize_aws_authentication_method(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKinesisAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("signature_version")
-    def serialize_signature_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKinesisSignatureVersion(value)
-            except ValueError:
-                return value
-        return value
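
The deleted hooks all did the same thing: at dump time, try to coerce a plain string back into the matching enum member, falling back to the raw value. Validation already covers both cases, because each field runs through PlainValidator(validate_open_enum(False)) and the enums are open (OpenEnumMeta): a recognized string becomes a member, and, assuming the open-enum validator behaves as in other Speakeasy-generated SDKs, an unrecognized string passes through unchanged rather than failing. A sketch against InputKinesisPq from the diff above:

from cribl_control_plane.models import InputKinesisPq

pq = InputKinesisPq.model_validate({"mode": "smart", "compress": "zstd"})
print(pq.mode)      # InputKinesisMode.SMART -- known value, coerced to a member
print(pq.compress)  # zstd -- unknown value, preserved as a plain string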
cribl_control_plane/models/inputkubeevents.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputKubeEventsConnection(BaseModel):
 class InputKubeEventsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputKubeEventsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,24 +102,6 @@ class InputKubeEventsPq(BaseModel):
         Optional[InputKubeEventsPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeEventsMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeEventsCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKubeEventsRuleTypedDict(TypedDict):
     filter_: str
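
The same pair of hooks disappears from every Pq model in this release, and dropping them is behavior-preserving for in-set values: a str-based Enum member is the string it wraps, so it serializes as its value without custom help. A self-contained illustration (stdlib only; the class name here is invented for the example):

import json
from enum import Enum

class Compression(str, Enum):  # same shape as the generated open enums
    NONE = "none"
    GZIP = "gzip"

# The member is an instance of str, so json encodes it as its value:
print(json.dumps({"compress": Compression.GZIP}))  # {"compress": "gzip"}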
cribl_control_plane/models/inputkubelogs.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputKubeLogsConnection(BaseModel):
 class InputKubeLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputKubeLogsPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,24 +102,6 @@ class InputKubeLogsPq(BaseModel):
         Optional[InputKubeLogsPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeLogsMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeLogsPqCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKubeLogsRuleTypedDict(TypedDict):
     filter_: str
@@ -193,15 +170,6 @@ class InputKubeLogsDiskSpooling(BaseModel):
     ] = InputKubeLogsPersistenceCompression.GZIP
     r"""Data compression format. Default is gzip."""
 
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeLogsPersistenceCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKubeLogsTypedDict(TypedDict):
     type: InputKubeLogsType
cribl_control_plane/models/inputkubemetrics.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputKubeMetricsConnection(BaseModel):
 class InputKubeMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
-    # Smart
     SMART = "smart"
-    # Always On
     ALWAYS = "always"
 
 
 class InputKubeMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
@@ -107,24 +102,6 @@ class InputKubeMetricsPq(BaseModel):
         Optional[InputKubeMetricsPqControls], pydantic.Field(alias="pqControls")
     ] = None
 
-    @field_serializer("mode")
-    def serialize_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeMetricsMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeMetricsCompression(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKubeMetricsRuleTypedDict(TypedDict):
     filter_: str
@@ -196,15 +173,6 @@ class InputKubeMetricsPersistence(BaseModel):
     )
     r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/<id>"""
 
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.InputKubeMetricsDataCompressionFormat(value)
-            except ValueError:
-                return value
-        return value
-
 
 class InputKubeMetricsTypedDict(TypedDict):
     type: InputKubeMetricsType