cribl-control-plane 0.0.50rc2__py3-none-any.whl → 0.0.52__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (182)
  1. cribl_control_plane/_hooks/clientcredentials.py +91 -41
  2. cribl_control_plane/_version.py +6 -4
  3. cribl_control_plane/errors/apierror.py +1 -1
  4. cribl_control_plane/errors/criblcontrolplaneerror.py +1 -1
  5. cribl_control_plane/errors/error.py +1 -1
  6. cribl_control_plane/errors/healthstatus_error.py +3 -9
  7. cribl_control_plane/errors/no_response_error.py +1 -1
  8. cribl_control_plane/errors/responsevalidationerror.py +1 -1
  9. cribl_control_plane/groups_sdk.py +4 -4
  10. cribl_control_plane/health.py +2 -6
  11. cribl_control_plane/models/__init__.py +31 -56
  12. cribl_control_plane/models/appmode.py +13 -0
  13. cribl_control_plane/models/cacheconnection.py +2 -10
  14. cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
  15. cribl_control_plane/models/cloudprovider.py +1 -2
  16. cribl_control_plane/models/configgroup.py +4 -24
  17. cribl_control_plane/models/configgroupcloud.py +2 -6
  18. cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
  19. cribl_control_plane/models/createinputhectokenbyidop.py +5 -6
  20. cribl_control_plane/models/createversionpushop.py +5 -5
  21. cribl_control_plane/models/cribllakedataset.py +2 -8
  22. cribl_control_plane/models/datasetmetadata.py +2 -8
  23. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
  24. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
  25. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
  26. cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
  27. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
  28. cribl_control_plane/models/getsummaryop.py +2 -7
  29. cribl_control_plane/models/getversionshowop.py +5 -6
  30. cribl_control_plane/models/gitinfo.py +3 -14
  31. cribl_control_plane/models/hbcriblinfo.py +3 -24
  32. cribl_control_plane/models/healthstatus.py +4 -7
  33. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  34. cribl_control_plane/models/input.py +63 -65
  35. cribl_control_plane/models/inputappscope.py +14 -34
  36. cribl_control_plane/models/inputazureblob.py +6 -17
  37. cribl_control_plane/models/inputcollection.py +4 -11
  38. cribl_control_plane/models/inputconfluentcloud.py +32 -41
  39. cribl_control_plane/models/inputcribl.py +4 -11
  40. cribl_control_plane/models/inputcriblhttp.py +8 -23
  41. cribl_control_plane/models/inputcribllakehttp.py +10 -22
  42. cribl_control_plane/models/inputcriblmetrics.py +4 -12
  43. cribl_control_plane/models/inputcribltcp.py +8 -23
  44. cribl_control_plane/models/inputcrowdstrike.py +10 -26
  45. cribl_control_plane/models/inputdatadogagent.py +8 -24
  46. cribl_control_plane/models/inputdatagen.py +4 -11
  47. cribl_control_plane/models/inputedgeprometheus.py +24 -58
  48. cribl_control_plane/models/inputelastic.py +14 -40
  49. cribl_control_plane/models/inputeventhub.py +6 -15
  50. cribl_control_plane/models/inputexec.py +6 -14
  51. cribl_control_plane/models/inputfile.py +6 -15
  52. cribl_control_plane/models/inputfirehose.py +8 -23
  53. cribl_control_plane/models/inputgooglepubsub.py +6 -19
  54. cribl_control_plane/models/inputgrafana.py +24 -67
  55. cribl_control_plane/models/inputhttp.py +8 -23
  56. cribl_control_plane/models/inputhttpraw.py +8 -23
  57. cribl_control_plane/models/inputjournalfiles.py +4 -12
  58. cribl_control_plane/models/inputkafka.py +28 -41
  59. cribl_control_plane/models/inputkinesis.py +14 -38
  60. cribl_control_plane/models/inputkubeevents.py +4 -11
  61. cribl_control_plane/models/inputkubelogs.py +8 -16
  62. cribl_control_plane/models/inputkubemetrics.py +8 -16
  63. cribl_control_plane/models/inputloki.py +10 -29
  64. cribl_control_plane/models/inputmetrics.py +8 -23
  65. cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
  66. cribl_control_plane/models/inputmsk.py +30 -48
  67. cribl_control_plane/models/inputnetflow.py +4 -11
  68. cribl_control_plane/models/inputoffice365mgmt.py +14 -33
  69. cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
  70. cribl_control_plane/models/inputoffice365service.py +16 -35
  71. cribl_control_plane/models/inputopentelemetry.py +16 -38
  72. cribl_control_plane/models/inputprometheus.py +18 -50
  73. cribl_control_plane/models/inputprometheusrw.py +10 -30
  74. cribl_control_plane/models/inputrawudp.py +4 -11
  75. cribl_control_plane/models/inputs3.py +8 -21
  76. cribl_control_plane/models/inputs3inventory.py +10 -26
  77. cribl_control_plane/models/inputsecuritylake.py +10 -27
  78. cribl_control_plane/models/inputsnmp.py +6 -16
  79. cribl_control_plane/models/inputsplunk.py +12 -33
  80. cribl_control_plane/models/inputsplunkhec.py +10 -29
  81. cribl_control_plane/models/inputsplunksearch.py +14 -33
  82. cribl_control_plane/models/inputsqs.py +10 -27
  83. cribl_control_plane/models/inputsyslog.py +16 -43
  84. cribl_control_plane/models/inputsystemmetrics.py +24 -48
  85. cribl_control_plane/models/inputsystemstate.py +8 -16
  86. cribl_control_plane/models/inputtcp.py +10 -29
  87. cribl_control_plane/models/inputtcpjson.py +10 -29
  88. cribl_control_plane/models/inputwef.py +14 -37
  89. cribl_control_plane/models/inputwindowsmetrics.py +24 -44
  90. cribl_control_plane/models/inputwineventlogs.py +10 -20
  91. cribl_control_plane/models/inputwiz.py +8 -21
  92. cribl_control_plane/models/inputwizwebhook.py +8 -23
  93. cribl_control_plane/models/inputzscalerhec.py +10 -29
  94. cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
  95. cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
  96. cribl_control_plane/models/masterworkerentry.py +2 -7
  97. cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
  98. cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
  99. cribl_control_plane/models/nodeprovidedinfo.py +0 -3
  100. cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
  101. cribl_control_plane/models/nodeupgradestate.py +1 -2
  102. cribl_control_plane/models/nodeupgradestatus.py +5 -13
  103. cribl_control_plane/models/output.py +79 -84
  104. cribl_control_plane/models/outputazureblob.py +18 -48
  105. cribl_control_plane/models/outputazuredataexplorer.py +28 -73
  106. cribl_control_plane/models/outputazureeventhub.py +18 -40
  107. cribl_control_plane/models/outputazurelogs.py +12 -35
  108. cribl_control_plane/models/outputclickhouse.py +20 -55
  109. cribl_control_plane/models/outputcloudwatch.py +10 -29
  110. cribl_control_plane/models/outputconfluentcloud.py +44 -71
  111. cribl_control_plane/models/outputcriblhttp.py +16 -44
  112. cribl_control_plane/models/outputcribllake.py +16 -46
  113. cribl_control_plane/models/outputcribltcp.py +18 -45
  114. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
  115. cribl_control_plane/models/outputdatadog.py +20 -48
  116. cribl_control_plane/models/outputdataset.py +18 -46
  117. cribl_control_plane/models/outputdiskspool.py +2 -7
  118. cribl_control_plane/models/outputdls3.py +24 -68
  119. cribl_control_plane/models/outputdynatracehttp.py +20 -53
  120. cribl_control_plane/models/outputdynatraceotlp.py +22 -55
  121. cribl_control_plane/models/outputelastic.py +18 -43
  122. cribl_control_plane/models/outputelasticcloud.py +12 -36
  123. cribl_control_plane/models/outputexabeam.py +10 -29
  124. cribl_control_plane/models/outputfilesystem.py +14 -39
  125. cribl_control_plane/models/outputgooglechronicle.py +16 -50
  126. cribl_control_plane/models/outputgooglecloudlogging.py +18 -50
  127. cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
  128. cribl_control_plane/models/outputgooglepubsub.py +10 -31
  129. cribl_control_plane/models/outputgrafanacloud.py +32 -97
  130. cribl_control_plane/models/outputgraphite.py +14 -31
  131. cribl_control_plane/models/outputhoneycomb.py +12 -35
  132. cribl_control_plane/models/outputhumiohec.py +16 -43
  133. cribl_control_plane/models/outputinfluxdb.py +16 -42
  134. cribl_control_plane/models/outputkafka.py +40 -69
  135. cribl_control_plane/models/outputkinesis.py +16 -40
  136. cribl_control_plane/models/outputloki.py +16 -41
  137. cribl_control_plane/models/outputminio.py +24 -65
  138. cribl_control_plane/models/outputmsk.py +42 -77
  139. cribl_control_plane/models/outputnewrelic.py +18 -43
  140. cribl_control_plane/models/outputnewrelicevents.py +14 -41
  141. cribl_control_plane/models/outputopentelemetry.py +26 -67
  142. cribl_control_plane/models/outputprometheus.py +12 -35
  143. cribl_control_plane/models/outputring.py +8 -19
  144. cribl_control_plane/models/outputs3.py +26 -68
  145. cribl_control_plane/models/outputsecuritylake.py +18 -52
  146. cribl_control_plane/models/outputsentinel.py +18 -45
  147. cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
  148. cribl_control_plane/models/outputservicenow.py +24 -60
  149. cribl_control_plane/models/outputsignalfx.py +14 -37
  150. cribl_control_plane/models/outputsns.py +14 -36
  151. cribl_control_plane/models/outputsplunk.py +24 -60
  152. cribl_control_plane/models/outputsplunkhec.py +12 -35
  153. cribl_control_plane/models/outputsplunklb.py +30 -77
  154. cribl_control_plane/models/outputsqs.py +16 -41
  155. cribl_control_plane/models/outputstatsd.py +14 -30
  156. cribl_control_plane/models/outputstatsdext.py +12 -29
  157. cribl_control_plane/models/outputsumologic.py +12 -35
  158. cribl_control_plane/models/outputsyslog.py +24 -58
  159. cribl_control_plane/models/outputtcpjson.py +20 -52
  160. cribl_control_plane/models/outputwavefront.py +12 -35
  161. cribl_control_plane/models/outputwebhook.py +22 -58
  162. cribl_control_plane/models/outputxsiam.py +14 -35
  163. cribl_control_plane/models/productscore.py +1 -2
  164. cribl_control_plane/models/rbacresource.py +1 -2
  165. cribl_control_plane/models/resourcepolicy.py +2 -4
  166. cribl_control_plane/models/routecloneconf.py +13 -0
  167. cribl_control_plane/models/routeconf.py +4 -3
  168. cribl_control_plane/models/runnablejobcollection.py +13 -30
  169. cribl_control_plane/models/runnablejobexecutor.py +4 -13
  170. cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
  171. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
  172. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
  173. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +5 -6
  174. cribl_control_plane/models/workertypes.py +1 -2
  175. {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.52.dist-info}/METADATA +14 -12
  176. cribl_control_plane-0.0.52.dist-info/RECORD +325 -0
  177. cribl_control_plane/models/error.py +0 -16
  178. cribl_control_plane/models/gethealthinfoop.py +0 -17
  179. cribl_control_plane/models/gitshowresult.py +0 -19
  180. cribl_control_plane/models/outputdatabricks.py +0 -282
  181. cribl_control_plane-0.0.50rc2.dist-info/RECORD +0 -327
  182. {cribl_control_plane-0.0.50rc2.dist-info → cribl_control_plane-0.0.52.dist-info}/WHEEL +0 -0
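The dominant change across the model files below is the removal of Speakeasy's open-enum machinery: the utils.OpenEnumMeta metaclass and the PlainValidator(validate_open_enum(False)) wrappers are gone, so the affected fields are now plain closed str Enums. The release also adds appmode.py and routecloneconf.py, adds a schemaType field to the Kafka schema-registry settings (shown below), and deletes outputdatabricks.py. The sketch below illustrates the practical effect of the enum change; it assumes these classes are still re-exported from cribl_control_plane.models, per the changed models/__init__.py.

```python
# A minimal sketch, not from the package docs: with the open-enum metaclass
# removed in 0.0.52, unrecognized enum values fail pydantic validation
# instead of passing through as bare strings.
from pydantic import ValidationError

from cribl_control_plane.models import InputKafkaPq  # assumed export path

pq = InputKafkaPq(mode="smart")
assert pq.mode.value == "smart"  # known values still coerce to enum members

try:
    InputKafkaPq(compress="zstd")  # not a member of InputKafkaCompression
except ValidationError as exc:
    print(exc)  # 0.0.52 rejects this; 0.0.50rc2's open enums tolerated it
```

Callers that relied on unknown enum values passing through as strings should pin to the listed members before upgrading.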
cribl_control_plane/models/inputkafka.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputKafkaConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputKafkaPqTypedDict(TypedDict):
 
 
 class InputKafkaPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKafkaMode], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaMode.ALWAYS
+    mode: Optional[InputKafkaMode] = InputKafkaMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputKafkaPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKafkaCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaCompression.NONE
+    compress: Optional[InputKafkaCompression] = InputKafkaCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,6 +96,13 @@ class InputKafkaPq(BaseModel):
     ] = None
 
 
+class InputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -122,18 +122,14 @@ class InputKafkaAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -193,18 +189,12 @@ class InputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
@@ -213,6 +203,8 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -232,6 +224,11 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -251,7 +248,7 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputKafkaKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaSASLMechanism(str, Enum):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -272,9 +269,7 @@ class InputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Annotated[
-        Optional[InputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaSASLMechanism.PLAIN
+    mechanism: Optional[InputKafkaSASLMechanism] = InputKafkaSASLMechanism.PLAIN
 
     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
@@ -282,14 +277,14 @@ class InputKafkaAuthentication(BaseModel):
     r"""Enable OAuth authentication"""
 
 
-class InputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -349,19 +344,11 @@ class InputKafkaTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputKafkaMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputKafkaMinimumTLSVersion], pydantic.Field(alias="minVersion")
    ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputKafkaMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputKafkaMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
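Besides the enum tightening, inputkafka.py gains a new InputKafkaSchemaType enum and a schema_type field (wire alias "schemaType", defaulting to avro) on the schema-registry settings. A hypothetical construction, assuming the class is re-exported from cribl_control_plane.models and that the Speakeasy base model allows population by field name:

```python
# Hypothetical usage of the schemaType field added in 0.0.52.
from cribl_control_plane.models import (  # assumed export path
    InputKafkaKafkaSchemaRegistryAuthentication,
    InputKafkaSchemaType,
)

registry = InputKafkaKafkaSchemaRegistryAuthentication(
    disabled=False,
    schema_registry_url="http://localhost:8081",
    schema_type=InputKafkaSchemaType.JSON,  # default is AVRO
)

# JSON-mode serialization applies the camelCase wire alias.
print(registry.model_dump(mode="json", by_alias=True)["schemaType"])  # -> "json"
```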
cribl_control_plane/models/inputkinesis.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputKinesisConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKinesisMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKinesisCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputKinesisPqTypedDict(TypedDict):
 
 
 class InputKinesisPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKinesisMode], PlainValidator(validate_open_enum(False))
-    ] = InputKinesisMode.ALWAYS
+    mode: Optional[InputKinesisMode] = InputKinesisMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputKinesisPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKinesisCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKinesisCompression.NONE
+    compress: Optional[InputKinesisCompression] = InputKinesisCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputKinesisPq(BaseModel):
     ] = None
 
 
-class ShardIteratorStart(str, Enum, metaclass=utils.OpenEnumMeta):
+class ShardIteratorStart(str, Enum):
     r"""Location at which to start reading a shard for the first time"""
 
     TRIM_HORIZON = "TRIM_HORIZON"
     LATEST = "LATEST"
 
 
-class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisRecordDataFormat(str, Enum):
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
     CRIBL = "cribl"
@@ -119,14 +112,14 @@ class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     LINE = "line"
 
 
-class ShardLoadBalancing(str, Enum, metaclass=utils.OpenEnumMeta):
+class ShardLoadBalancing(str, Enum):
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
     CONSISTENT_HASHING = "ConsistentHashing"
     ROUND_ROBIN = "RoundRobin"
 
 
-class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -134,7 +127,7 @@ class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"
 
 
-class InputKinesisSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisSignatureVersion(str, Enum):
     r"""Signature version to use for signing Kinesis stream requests"""
 
     V2 = "v2"
@@ -266,19 +259,12 @@ class InputKinesis(BaseModel):
     r"""A JavaScript expression to be called with each shardId for the stream. If the expression evaluates to a truthy value, the shard will be processed."""
 
     shard_iterator_type: Annotated[
-        Annotated[
-            Optional[ShardIteratorStart], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="shardIteratorType"),
+        Optional[ShardIteratorStart], pydantic.Field(alias="shardIteratorType")
    ] = ShardIteratorStart.TRIM_HORIZON
     r"""Location at which to start reading a shard for the first time"""
 
     payload_format: Annotated[
-        Annotated[
-            Optional[InputKinesisRecordDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="payloadFormat"),
+        Optional[InputKinesisRecordDataFormat], pydantic.Field(alias="payloadFormat")
     ] = InputKinesisRecordDataFormat.CRIBL
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
@@ -293,18 +279,12 @@ class InputKinesis(BaseModel):
     r"""Maximum number of records, across all shards, to pull down at once per Worker Process"""
 
     load_balancing_algorithm: Annotated[
-        Annotated[
-            Optional[ShardLoadBalancing], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="loadBalancingAlgorithm"),
+        Optional[ShardLoadBalancing], pydantic.Field(alias="loadBalancingAlgorithm")
     ] = ShardLoadBalancing.CONSISTENT_HASHING
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputKinesisAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKinesisAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputKinesisAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -317,11 +297,7 @@ class InputKinesis(BaseModel):
     r"""Kinesis stream service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Kinesis stream-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputKinesisSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputKinesisSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputKinesisSignatureVersion.V4
     r"""Signature version to use for signing Kinesis stream requests"""
 
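inputkinesis.py follows the same pattern: ShardIteratorStart, InputKinesisRecordDataFormat, ShardLoadBalancing, InputKinesisAuthenticationMethod, and InputKinesisSignatureVersion become closed enums, while the defaults (TRIM_HORIZON, cribl, ConsistentHashing, auto, v4) and camelCase aliases are unchanged. A small sketch of the persistent-queue sub-model, whose fields are all optional per its TypedDict (assumed export path):

```python
# Sketch: PQ settings keep their defaults and serialize unchanged.
from cribl_control_plane.models import InputKinesisMode, InputKinesisPq

pq = InputKinesisPq(mode=InputKinesisMode.SMART)
dumped = pq.model_dump(mode="json", by_alias=True)
print(dumped["mode"], dumped["compress"], dumped["path"])
# -> smart none $CRIBL_HOME/state/queues
```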
cribl_control_plane/models/inputkubeevents.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputKubeEventsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeEventsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeEventsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeEventsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeEventsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputKubeEventsPqTypedDict(TypedDict):
 
 
 class InputKubeEventsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeEventsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeEventsMode.ALWAYS
+    mode: Optional[InputKubeEventsMode] = InputKubeEventsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputKubeEventsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeEventsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeEventsCompression.NONE
+    compress: Optional[InputKubeEventsCompression] = InputKubeEventsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
cribl_control_plane/models/inputkubelogs.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputKubeLogsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeLogsPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputKubeLogsPqTypedDict(TypedDict):
 
 
 class InputKubeLogsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeLogsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeLogsMode.ALWAYS
+    mode: Optional[InputKubeLogsMode] = InputKubeLogsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputKubeLogsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeLogsPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeLogsPqCompression.NONE
+    compress: Optional[InputKubeLogsPqCompression] = InputKubeLogsPqCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -131,7 +124,7 @@ class InputKubeLogsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputKubeLogsPersistenceCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsPersistenceCompression(str, Enum):
     r"""Data compression format. Default is gzip."""
 
     NONE = "none"
@@ -164,10 +157,9 @@ class InputKubeLogsDiskSpooling(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""
 
-    compress: Annotated[
-        Optional[InputKubeLogsPersistenceCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputKubeLogsPersistenceCompression.GZIP
+    compress: Optional[InputKubeLogsPersistenceCompression] = (
+        InputKubeLogsPersistenceCompression.GZIP
+    )
     r"""Data compression format. Default is gzip."""
 
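inputkubelogs.py makes the same change in two spots, including the disk-spooling settings, where gzip remains the default compression codec. A sketch under the same export-path assumption:

```python
# Sketch: disk spooling still defaults to gzip compression in 0.0.52.
from cribl_control_plane.models import (
    InputKubeLogsDiskSpooling,
    InputKubeLogsPersistenceCompression,
)

spool = InputKubeLogsDiskSpooling(max_data_time="48h")
assert spool.compress is InputKubeLogsPersistenceCompression.GZIP

dumped = spool.model_dump(mode="json", by_alias=True)
print(dumped["maxDataTime"])  # -> "48h"
```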
cribl_control_plane/models/inputkubemetrics.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputKubeMetricsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputKubeMetricsPqTypedDict(TypedDict):
 
 
 class InputKubeMetricsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeMetricsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeMetricsMode.ALWAYS
+    mode: Optional[InputKubeMetricsMode] = InputKubeMetricsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputKubeMetricsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeMetricsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeMetricsCompression.NONE
+    compress: Optional[InputKubeMetricsCompression] = InputKubeMetricsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -131,7 +124,7 @@ class InputKubeMetricsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputKubeMetricsDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsDataCompressionFormat(str, Enum):
     NONE = "none"
     GZIP = "gzip"
 
@@ -163,10 +156,9 @@ class InputKubeMetricsPersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""
 
-    compress: Annotated[
-        Optional[InputKubeMetricsDataCompressionFormat],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputKubeMetricsDataCompressionFormat.GZIP
+    compress: Optional[InputKubeMetricsDataCompressionFormat] = (
+        InputKubeMetricsDataCompressionFormat.GZIP
+    )
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/kube_metrics"
cribl_control_plane/models/inputloki.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -26,14 +23,14 @@ class InputLokiConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputLokiMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputLokiMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputLokiCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputLokiCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -67,9 +64,7 @@ class InputLokiPqTypedDict(TypedDict):
 
 
 class InputLokiPq(BaseModel):
-    mode: Annotated[
-        Optional[InputLokiMode], PlainValidator(validate_open_enum(False))
-    ] = InputLokiMode.ALWAYS
+    mode: Optional[InputLokiMode] = InputLokiMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputLokiPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputLokiCompression], PlainValidator(validate_open_enum(False))
-    ] = InputLokiCompression.NONE
+    compress: Optional[InputLokiCompression] = InputLokiCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputLokiPq(BaseModel):
     ] = None
 
 
-class InputLokiMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputLokiMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputLokiMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputLokiMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,23 +162,15 @@ class InputLokiTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputLokiMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputLokiMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputLokiMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputLokiMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
-class InputLokiAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputLokiAuthenticationType(str, Enum):
     r"""Loki logs authentication type"""
 
     NONE = "none"
@@ -416,11 +401,7 @@ class InputLoki(BaseModel):
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'."""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputLokiAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputLokiAuthenticationType], pydantic.Field(alias="authType")
     ] = InputLokiAuthenticationType.NONE
     r"""Loki logs authentication type"""
 
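inputloki.py rounds out the pattern: the TLS version bounds and InputLokiAuthenticationType become closed enums, and auth_type still defaults to none. A final sketch of the server-side TLS settings, assuming its remaining fields stay optional as the "] = None" context lines suggest:

```python
# Sketch: TLS bounds on the Loki input now reject values outside
# TLSv1..TLSv1.3 at validation time.
from cribl_control_plane.models import (  # assumed export path
    InputLokiMinimumTLSVersion,
    InputLokiTLSSettingsServerSide,
)

tls = InputLokiTLSSettingsServerSide(
    min_version=InputLokiMinimumTLSVersion.TL_SV1_2
)
dumped = tls.model_dump(mode="json", by_alias=True, exclude_none=True)
print(dumped["minVersion"])  # -> "TLSv1.2"
```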