cribl-control-plane 0.0.46__py3-none-any.whl → 0.0.48a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of cribl-control-plane has been flagged as potentially problematic.

Files changed (167)
  1. cribl_control_plane/_version.py +4 -6
  2. cribl_control_plane/errors/apierror.py +2 -0
  3. cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
  4. cribl_control_plane/errors/error.py +4 -2
  5. cribl_control_plane/errors/healthstatus_error.py +12 -4
  6. cribl_control_plane/errors/no_response_error.py +5 -1
  7. cribl_control_plane/errors/responsevalidationerror.py +2 -0
  8. cribl_control_plane/models/__init__.py +12 -12
  9. cribl_control_plane/models/cacheconnection.py +10 -2
  10. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  11. cribl_control_plane/models/cloudprovider.py +2 -1
  12. cribl_control_plane/models/configgroup.py +7 -2
  13. cribl_control_plane/models/configgroupcloud.py +6 -2
  14. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  15. cribl_control_plane/models/cribllakedataset.py +8 -2
  16. cribl_control_plane/models/datasetmetadata.py +8 -2
  17. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  18. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  19. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  20. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  21. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  22. cribl_control_plane/models/getsummaryop.py +7 -2
  23. cribl_control_plane/models/hbcriblinfo.py +19 -3
  24. cribl_control_plane/models/healthstatus.py +7 -4
  25. cribl_control_plane/models/heartbeatmetadata.py +3 -0
  26. cribl_control_plane/models/inputappscope.py +34 -14
  27. cribl_control_plane/models/inputazureblob.py +17 -6
  28. cribl_control_plane/models/inputcollection.py +11 -4
  29. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  30. cribl_control_plane/models/inputcribl.py +11 -4
  31. cribl_control_plane/models/inputcriblhttp.py +23 -8
  32. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  33. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  34. cribl_control_plane/models/inputcribltcp.py +23 -8
  35. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  36. cribl_control_plane/models/inputdatadogagent.py +24 -8
  37. cribl_control_plane/models/inputdatagen.py +11 -4
  38. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  39. cribl_control_plane/models/inputelastic.py +40 -14
  40. cribl_control_plane/models/inputeventhub.py +15 -6
  41. cribl_control_plane/models/inputexec.py +14 -6
  42. cribl_control_plane/models/inputfile.py +15 -6
  43. cribl_control_plane/models/inputfirehose.py +23 -8
  44. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  45. cribl_control_plane/models/inputgrafana.py +67 -24
  46. cribl_control_plane/models/inputhttp.py +23 -8
  47. cribl_control_plane/models/inputhttpraw.py +23 -8
  48. cribl_control_plane/models/inputjournalfiles.py +12 -4
  49. cribl_control_plane/models/inputkafka.py +46 -16
  50. cribl_control_plane/models/inputkinesis.py +38 -14
  51. cribl_control_plane/models/inputkubeevents.py +11 -4
  52. cribl_control_plane/models/inputkubelogs.py +16 -8
  53. cribl_control_plane/models/inputkubemetrics.py +16 -8
  54. cribl_control_plane/models/inputloki.py +29 -10
  55. cribl_control_plane/models/inputmetrics.py +23 -8
  56. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  57. cribl_control_plane/models/inputmsk.py +53 -18
  58. cribl_control_plane/models/inputnetflow.py +11 -4
  59. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  60. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  61. cribl_control_plane/models/inputoffice365service.py +35 -16
  62. cribl_control_plane/models/inputopentelemetry.py +38 -16
  63. cribl_control_plane/models/inputprometheus.py +50 -18
  64. cribl_control_plane/models/inputprometheusrw.py +30 -10
  65. cribl_control_plane/models/inputrawudp.py +11 -4
  66. cribl_control_plane/models/inputs3.py +21 -8
  67. cribl_control_plane/models/inputs3inventory.py +26 -10
  68. cribl_control_plane/models/inputsecuritylake.py +27 -10
  69. cribl_control_plane/models/inputsnmp.py +16 -6
  70. cribl_control_plane/models/inputsplunk.py +33 -12
  71. cribl_control_plane/models/inputsplunkhec.py +29 -10
  72. cribl_control_plane/models/inputsplunksearch.py +33 -14
  73. cribl_control_plane/models/inputsqs.py +27 -10
  74. cribl_control_plane/models/inputsyslog.py +43 -16
  75. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  76. cribl_control_plane/models/inputsystemstate.py +16 -8
  77. cribl_control_plane/models/inputtcp.py +29 -10
  78. cribl_control_plane/models/inputtcpjson.py +29 -10
  79. cribl_control_plane/models/inputwef.py +37 -14
  80. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  81. cribl_control_plane/models/inputwineventlogs.py +20 -10
  82. cribl_control_plane/models/inputwiz.py +21 -8
  83. cribl_control_plane/models/inputwizwebhook.py +23 -8
  84. cribl_control_plane/models/inputzscalerhec.py +29 -10
  85. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  86. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  87. cribl_control_plane/models/masterworkerentry.py +7 -2
  88. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  89. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  90. cribl_control_plane/models/nodeprovidedinfo.py +3 -0
  91. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  92. cribl_control_plane/models/nodeupgradestate.py +2 -1
  93. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  94. cribl_control_plane/models/outputazureblob.py +48 -18
  95. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  96. cribl_control_plane/models/outputazureeventhub.py +40 -18
  97. cribl_control_plane/models/outputazurelogs.py +35 -12
  98. cribl_control_plane/models/outputclickhouse.py +55 -20
  99. cribl_control_plane/models/outputcloudwatch.py +29 -10
  100. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  101. cribl_control_plane/models/outputcriblhttp.py +44 -16
  102. cribl_control_plane/models/outputcribllake.py +46 -16
  103. cribl_control_plane/models/outputcribltcp.py +45 -18
  104. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  105. cribl_control_plane/models/outputdatadog.py +48 -20
  106. cribl_control_plane/models/outputdataset.py +46 -18
  107. cribl_control_plane/models/outputdiskspool.py +7 -2
  108. cribl_control_plane/models/outputdls3.py +68 -24
  109. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  110. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  111. cribl_control_plane/models/outputelastic.py +43 -18
  112. cribl_control_plane/models/outputelasticcloud.py +36 -12
  113. cribl_control_plane/models/outputexabeam.py +29 -10
  114. cribl_control_plane/models/outputfilesystem.py +39 -14
  115. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  116. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  117. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  118. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  119. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  120. cribl_control_plane/models/outputgraphite.py +31 -14
  121. cribl_control_plane/models/outputhoneycomb.py +35 -12
  122. cribl_control_plane/models/outputhumiohec.py +43 -16
  123. cribl_control_plane/models/outputinfluxdb.py +42 -16
  124. cribl_control_plane/models/outputkafka.py +74 -28
  125. cribl_control_plane/models/outputkinesis.py +40 -16
  126. cribl_control_plane/models/outputloki.py +41 -16
  127. cribl_control_plane/models/outputminio.py +65 -24
  128. cribl_control_plane/models/outputmsk.py +82 -30
  129. cribl_control_plane/models/outputnewrelic.py +43 -18
  130. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  131. cribl_control_plane/models/outputopentelemetry.py +67 -26
  132. cribl_control_plane/models/outputprometheus.py +35 -12
  133. cribl_control_plane/models/outputring.py +19 -8
  134. cribl_control_plane/models/outputs3.py +68 -26
  135. cribl_control_plane/models/outputsecuritylake.py +52 -18
  136. cribl_control_plane/models/outputsentinel.py +45 -18
  137. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  138. cribl_control_plane/models/outputservicenow.py +60 -24
  139. cribl_control_plane/models/outputsignalfx.py +37 -14
  140. cribl_control_plane/models/outputsns.py +36 -14
  141. cribl_control_plane/models/outputsplunk.py +60 -24
  142. cribl_control_plane/models/outputsplunkhec.py +35 -12
  143. cribl_control_plane/models/outputsplunklb.py +77 -30
  144. cribl_control_plane/models/outputsqs.py +41 -16
  145. cribl_control_plane/models/outputstatsd.py +30 -14
  146. cribl_control_plane/models/outputstatsdext.py +29 -12
  147. cribl_control_plane/models/outputsumologic.py +35 -12
  148. cribl_control_plane/models/outputsyslog.py +58 -24
  149. cribl_control_plane/models/outputtcpjson.py +52 -20
  150. cribl_control_plane/models/outputwavefront.py +35 -12
  151. cribl_control_plane/models/outputwebhook.py +58 -22
  152. cribl_control_plane/models/outputxsiam.py +35 -14
  153. cribl_control_plane/models/productscore.py +2 -1
  154. cribl_control_plane/models/rbacresource.py +2 -1
  155. cribl_control_plane/models/resourcepolicy.py +4 -2
  156. cribl_control_plane/models/routeconf.py +3 -4
  157. cribl_control_plane/models/runnablejobcollection.py +30 -13
  158. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  159. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  160. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  161. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  162. cribl_control_plane/models/workertypes.py +2 -1
  163. {cribl_control_plane-0.0.46.dist-info → cribl_control_plane-0.0.48a1.dist-info}/METADATA +1 -1
  164. {cribl_control_plane-0.0.46.dist-info → cribl_control_plane-0.0.48a1.dist-info}/RECORD +165 -167
  165. {cribl_control_plane-0.0.46.dist-info → cribl_control_plane-0.0.48a1.dist-info}/WHEEL +1 -1
  166. cribl_control_plane/models/appmode.py +0 -13
  167. cribl_control_plane/models/routecloneconf.py +0 -13
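
The three diffs reproduced below (outputconfluentcloud.py, outputcriblhttp.py, and outputcribllake.py) all apply the same mechanical change: closed enums become open enums. Each enum class gains the utils.OpenEnumMeta metaclass, and each model field typed with such an enum is wrapped in a PlainValidator(validate_open_enum(...)) annotation. In before/after form, using the compression field from outputconfluentcloud.py:

# 0.0.46: values outside the enum fail pydantic validation
class OutputConfluentCloudCompression(str, Enum):
    ...

compression: Optional[OutputConfluentCloudCompression] = (
    OutputConfluentCloudCompression.GZIP
)

# 0.0.48a1: the metaclass and validator make the enum "open"
class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
    ...

compression: Annotated[
    Optional[OutputConfluentCloudCompression],
    PlainValidator(validate_open_enum(False)),
] = OutputConfluentCloudCompression.GZIP

Judging from the names alone, the intent appears to be tolerance of enum values that a newer server sends but that this client version does not list; a runnable sketch of that behavior follows the first diff.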
--- a/cribl_control_plane/models/outputconfluentcloud.py
+++ b/cribl_control_plane/models/outputconfluentcloud.py
@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,14 +15,14 @@ class OutputConfluentCloudType(str, Enum):
     CONFLUENT_CLOUD = "confluent_cloud"


-class OutputConfluentCloudMinimumTLSVersion(str, Enum):
+class OutputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputConfluentCloudMaximumTLSVersion(str, Enum):
+class OutputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -79,17 +82,23 @@ class OutputConfluentCloudTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Optional[OutputConfluentCloudMinimumTLSVersion],
+        Annotated[
+            Optional[OutputConfluentCloudMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[OutputConfluentCloudMaximumTLSVersion],
+        Annotated[
+            Optional[OutputConfluentCloudMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None


-class OutputConfluentCloudAcknowledgments(int, Enum):
+class OutputConfluentCloudAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     r"""Control the number of required acknowledgments."""

     ONE = 1
@@ -97,7 +106,7 @@ class OutputConfluentCloudAcknowledgments(int, Enum):
     MINUS_1 = -1


-class OutputConfluentCloudRecordDataFormat(str, Enum):
+class OutputConfluentCloudRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use to serialize events before writing to Kafka."""

     JSON = "json"
@@ -105,7 +114,7 @@ class OutputConfluentCloudRecordDataFormat(str, Enum):
     PROTOBUF = "protobuf"


-class OutputConfluentCloudCompression(str, Enum):
+class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending to Kafka"""

     NONE = "none"
@@ -114,7 +123,7 @@ class OutputConfluentCloudCompression(str, Enum):
     LZ4 = "lz4"


-class OutputConfluentCloudSchemaType(str, Enum):
+class OutputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""The schema format used to encode and decode event data"""

     AVRO = "avro"
@@ -140,14 +149,18 @@ class OutputConfluentCloudAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""


-class OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+class OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+class OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -207,12 +220,18 @@ class OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Optional[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+        Annotated[
+            Optional[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+        Annotated[
+            Optional[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None

@@ -249,7 +268,11 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

     schema_type: Annotated[
-        Optional[OutputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+        Annotated[
+            Optional[OutputConfluentCloudSchemaType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="schemaType"),
     ] = OutputConfluentCloudSchemaType.AVRO
     r"""The schema format used to encode and decode event data"""

@@ -282,7 +305,7 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""


-class OutputConfluentCloudSASLMechanism(str, Enum):
+class OutputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -303,9 +326,10 @@ class OutputConfluentCloudAuthentication(BaseModel):

     disabled: Optional[bool] = True

-    mechanism: Optional[OutputConfluentCloudSASLMechanism] = (
-        OutputConfluentCloudSASLMechanism.PLAIN
-    )
+    mechanism: Annotated[
+        Optional[OutputConfluentCloudSASLMechanism],
+        PlainValidator(validate_open_enum(False)),
+    ] = OutputConfluentCloudSASLMechanism.PLAIN

     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
@@ -313,7 +337,7 @@ class OutputConfluentCloudAuthentication(BaseModel):
     r"""Enable OAuth authentication"""


-class OutputConfluentCloudBackpressureBehavior(str, Enum):
+class OutputConfluentCloudBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -321,21 +345,23 @@ class OutputConfluentCloudBackpressureBehavior(str, Enum):
     QUEUE = "queue"


-class OutputConfluentCloudPqCompressCompression(str, Enum):
+class OutputConfluentCloudPqCompressCompression(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputConfluentCloudQueueFullBehavior(str, Enum):
+class OutputConfluentCloudQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputConfluentCloudMode(str, Enum):
+class OutputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -449,19 +475,25 @@ class OutputConfluentCloud(BaseModel):

     tls: Optional[OutputConfluentCloudTLSSettingsClientSide] = None

-    ack: Optional[OutputConfluentCloudAcknowledgments] = (
-        OutputConfluentCloudAcknowledgments.ONE
-    )
+    ack: Annotated[
+        Optional[OutputConfluentCloudAcknowledgments],
+        PlainValidator(validate_open_enum(True)),
+    ] = OutputConfluentCloudAcknowledgments.ONE
     r"""Control the number of required acknowledgments."""

     format_: Annotated[
-        Optional[OutputConfluentCloudRecordDataFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputConfluentCloudRecordDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="format"),
     ] = OutputConfluentCloudRecordDataFormat.JSON
     r"""Format to use to serialize events before writing to Kafka."""

-    compression: Optional[OutputConfluentCloudCompression] = (
-        OutputConfluentCloudCompression.GZIP
-    )
+    compression: Annotated[
+        Optional[OutputConfluentCloudCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = OutputConfluentCloudCompression.GZIP
     r"""Codec to use to compress the data before sending to Kafka"""

     max_record_size_kb: Annotated[
@@ -522,7 +554,10 @@ class OutputConfluentCloud(BaseModel):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""

     on_backpressure: Annotated[
-        Optional[OutputConfluentCloudBackpressureBehavior],
+        Annotated[
+            Optional[OutputConfluentCloudBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputConfluentCloudBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -548,19 +583,29 @@ class OutputConfluentCloud(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Optional[OutputConfluentCloudPqCompressCompression],
+        Annotated[
+            Optional[OutputConfluentCloudPqCompressCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqCompress"),
     ] = OutputConfluentCloudPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Optional[OutputConfluentCloudQueueFullBehavior],
+        Annotated[
+            Optional[OutputConfluentCloudQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputConfluentCloudQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Optional[OutputConfluentCloudMode], pydantic.Field(alias="pqMode")
+        Annotated[
+            Optional[OutputConfluentCloudMode],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqMode"),
     ] = OutputConfluentCloudMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

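A minimal, runnable sketch (plain pydantic, no SDK required) of what the open-enum pattern above appears to accomplish. Color, lenient_str, and Palette are hypothetical stand-ins: the real SDK composes utils.OpenEnumMeta with validate_open_enum, whose internals this diff does not show.

from enum import Enum
from typing import Optional, Union

import pydantic
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Color(str, Enum):  # hypothetical closed enum
    RED = "red"
    BLUE = "blue"


def lenient_str(value: object) -> object:
    # Map known values to enum members; pass unknown values through
    # unchanged instead of raising, which is what makes an enum "open".
    try:
        return Color(value)
    except ValueError:
        return value


class Palette(pydantic.BaseModel):
    color: Annotated[Optional[Union[Color, str]], PlainValidator(lenient_str)] = None


print(Palette.model_validate({"color": "red"}).color)         # Color.RED
print(Palette.model_validate({"color": "chartreuse"}).color)  # chartreuse

Under this reading, a compression codec added server-side after this SDK release would deserialize to a bare string instead of raising a pydantic ValidationError.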
--- a/cribl_control_plane/models/outputcriblhttp.py
+++ b/cribl_control_plane/models/outputcriblhttp.py
@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,14 +15,14 @@ class OutputCriblHTTPType(str, Enum):
     CRIBL_HTTP = "cribl_http"


-class OutputCriblHTTPMinimumTLSVersion(str, Enum):
+class OutputCriblHTTPMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputCriblHTTPMaximumTLSVersion(str, Enum):
+class OutputCriblHTTPMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -79,15 +82,23 @@ class OutputCriblHTTPTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Optional[OutputCriblHTTPMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[OutputCriblHTTPMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[OutputCriblHTTPMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[OutputCriblHTTPMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None


-class OutputCriblHTTPCompression(str, Enum):
+class OutputCriblHTTPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending"""

     NONE = "none"
@@ -105,7 +116,7 @@ class OutputCriblHTTPExtraHTTPHeader(BaseModel):
     name: Optional[str] = None


-class OutputCriblHTTPFailedRequestLoggingMode(str, Enum):
+class OutputCriblHTTPFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""

     PAYLOAD = "payload"
@@ -167,7 +178,7 @@ class OutputCriblHTTPTimeoutRetrySettings(BaseModel):
     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""


-class OutputCriblHTTPBackpressureBehavior(str, Enum):
+class OutputCriblHTTPBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -190,21 +201,21 @@ class OutputCriblHTTPURL(BaseModel):
     r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""


-class OutputCriblHTTPPqCompressCompression(str, Enum):
+class OutputCriblHTTPPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputCriblHTTPQueueFullBehavior(str, Enum):
+class OutputCriblHTTPQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputCriblHTTPMode(str, Enum):
+class OutputCriblHTTPMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -335,7 +346,9 @@ class OutputCriblHTTP(BaseModel):
     ] = None
     r"""Fields to exclude from the event. By default, all internal fields except `__output` are sent. Example: `cribl_pipe`, `c*`. Wildcards supported."""

-    compression: Optional[OutputCriblHTTPCompression] = OutputCriblHTTPCompression.GZIP
+    compression: Annotated[
+        Optional[OutputCriblHTTPCompression], PlainValidator(validate_open_enum(False))
+    ] = OutputCriblHTTPCompression.GZIP
     r"""Codec to use to compress the data before sending"""

     concurrency: Optional[float] = 5
@@ -374,7 +387,10 @@ class OutputCriblHTTP(BaseModel):
     r"""Headers to add to all events"""

     failed_request_logging_mode: Annotated[
-        Optional[OutputCriblHTTPFailedRequestLoggingMode],
+        Annotated[
+            Optional[OutputCriblHTTPFailedRequestLoggingMode],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="failedRequestLoggingMode"),
     ] = OutputCriblHTTPFailedRequestLoggingMode.NONE
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
@@ -401,7 +417,10 @@ class OutputCriblHTTP(BaseModel):
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""

     on_backpressure: Annotated[
-        Optional[OutputCriblHTTPBackpressureBehavior],
+        Annotated[
+            Optional[OutputCriblHTTPBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputCriblHTTPBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -445,19 +464,28 @@ class OutputCriblHTTP(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Optional[OutputCriblHTTPPqCompressCompression],
+        Annotated[
+            Optional[OutputCriblHTTPPqCompressCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqCompress"),
     ] = OutputCriblHTTPPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Optional[OutputCriblHTTPQueueFullBehavior],
+        Annotated[
+            Optional[OutputCriblHTTPQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputCriblHTTPQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Optional[OutputCriblHTTPMode], pydantic.Field(alias="pqMode")
+        Annotated[
+            Optional[OutputCriblHTTPMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
     ] = OutputCriblHTTPMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

--- a/cribl_control_plane/models/outputcribllake.py
+++ b/cribl_control_plane/models/outputcribllake.py
@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,14 +15,14 @@ class OutputCriblLakeType(str, Enum):
     CRIBL_LAKE = "cribl_lake"


-class OutputCriblLakeSignatureVersion(str, Enum):
+class OutputCriblLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputCriblLakeObjectACL(str, Enum):
+class OutputCriblLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -31,7 +34,7 @@ class OutputCriblLakeObjectACL(str, Enum):
     BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


-class OutputCriblLakeStorageClass(str, Enum):
+class OutputCriblLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -44,32 +47,34 @@ class OutputCriblLakeStorageClass(str, Enum):
     DEEP_ARCHIVE = "DEEP_ARCHIVE"


-class OutputCriblLakeServerSideEncryptionForUploadedObjects(str, Enum):
+class OutputCriblLakeServerSideEncryptionForUploadedObjects(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     AES256 = "AES256"
     AWS_KMS = "aws:kms"


-class OutputCriblLakeBackpressureBehavior(str, Enum):
+class OutputCriblLakeBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputCriblLakeDiskSpaceProtection(str, Enum):
+class OutputCriblLakeDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class AwsAuthenticationMethod(str, Enum):
+class AwsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     AUTO = "auto"
     AUTO_RPC = "auto_rpc"
     MANUAL = "manual"


-class OutputCriblLakeFormat(str, Enum):
+class OutputCriblLakeFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     JSON = "json"
     PARQUET = "parquet"
     DDSS = "ddss"
@@ -200,7 +205,10 @@ class OutputCriblLake(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Optional[OutputCriblLakeSignatureVersion],
+        Annotated[
+            Optional[OutputCriblLakeSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputCriblLakeSignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -249,17 +257,28 @@ class OutputCriblLake(BaseModel):
     r"""Lake dataset to send the data to."""

     object_acl: Annotated[
-        Optional[OutputCriblLakeObjectACL], pydantic.Field(alias="objectACL")
+        Annotated[
+            Optional[OutputCriblLakeObjectACL],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="objectACL"),
     ] = OutputCriblLakeObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Optional[OutputCriblLakeStorageClass], pydantic.Field(alias="storageClass")
+        Annotated[
+            Optional[OutputCriblLakeStorageClass],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""

     server_side_encryption: Annotated[
-        Optional[OutputCriblLakeServerSideEncryptionForUploadedObjects],
+        Annotated[
+            Optional[OutputCriblLakeServerSideEncryptionForUploadedObjects],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="serverSideEncryption"),
     ] = None

@@ -300,7 +319,10 @@ class OutputCriblLake(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Optional[OutputCriblLakeBackpressureBehavior],
+        Annotated[
+            Optional[OutputCriblLakeBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputCriblLakeBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -311,7 +333,10 @@ class OutputCriblLake(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Optional[OutputCriblLakeDiskSpaceProtection],
+        Annotated[
+            Optional[OutputCriblLakeDiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputCriblLakeDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
@@ -337,12 +362,17 @@ class OutputCriblLake(BaseModel):
     r"""Maximum number of files that can be waiting for upload before backpressure is applied"""

     aws_authentication_method: Annotated[
-        Optional[AwsAuthenticationMethod],
+        Annotated[
+            Optional[AwsAuthenticationMethod], PlainValidator(validate_open_enum(False))
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = AwsAuthenticationMethod.AUTO

     format_: Annotated[
-        Optional[OutputCriblLakeFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputCriblLakeFormat], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="format"),
     ] = None

     max_concurrent_file_parts: Annotated[
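
One variation appears in these hunks: the single int-valued enum, OutputConfluentCloudAcknowledgments, is wrapped with validate_open_enum(True), while every string-valued enum gets validate_open_enum(False). The flag plausibly selects integer handling; treating that strictly as an inference from the diff, the int analogue of the earlier sketch would be:

from enum import Enum
from typing import Optional, Union

import pydantic
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Acks(int, Enum):  # hypothetical stand-in for the int-valued enum
    ONE = 1
    MINUS_1 = -1


def lenient_int(value: int) -> object:
    # Coerce unknown acknowledgment counts to plain ints rather than raising.
    try:
        return Acks(value)
    except ValueError:
        return int(value)


class KafkaSettings(pydantic.BaseModel):
    ack: Annotated[Optional[Union[Acks, int]], PlainValidator(lenient_int)] = Acks.ONE


print(KafkaSettings.model_validate({"ack": -1}).ack)  # Acks.MINUS_1
print(KafkaSettings.model_validate({"ack": 3}).ack)   # 3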