cribl-control-plane 0.0.50__py3-none-any.whl → 0.0.50rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (176)
  1. cribl_control_plane/_version.py +3 -5
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/groups_sdk.py +4 -4
  4. cribl_control_plane/health.py +6 -2
  5. cribl_control_plane/models/__init__.py +56 -31
  6. cribl_control_plane/models/cacheconnection.py +10 -2
  7. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  8. cribl_control_plane/models/cloudprovider.py +2 -1
  9. cribl_control_plane/models/configgroup.py +24 -4
  10. cribl_control_plane/models/configgroupcloud.py +6 -2
  11. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  12. cribl_control_plane/models/createinputhectokenbyidop.py +6 -5
  13. cribl_control_plane/models/createversionpushop.py +5 -5
  14. cribl_control_plane/models/cribllakedataset.py +8 -2
  15. cribl_control_plane/models/datasetmetadata.py +8 -2
  16. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  17. cribl_control_plane/models/error.py +16 -0
  18. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  19. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  20. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  21. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  22. cribl_control_plane/models/gethealthinfoop.py +17 -0
  23. cribl_control_plane/models/getsummaryop.py +7 -2
  24. cribl_control_plane/models/getversionshowop.py +6 -5
  25. cribl_control_plane/models/gitinfo.py +14 -3
  26. cribl_control_plane/models/gitshowresult.py +19 -0
  27. cribl_control_plane/models/hbcriblinfo.py +24 -3
  28. cribl_control_plane/models/healthstatus.py +7 -4
  29. cribl_control_plane/models/heartbeatmetadata.py +3 -0
  30. cribl_control_plane/models/input.py +65 -63
  31. cribl_control_plane/models/inputappscope.py +34 -14
  32. cribl_control_plane/models/inputazureblob.py +17 -6
  33. cribl_control_plane/models/inputcollection.py +11 -4
  34. cribl_control_plane/models/inputconfluentcloud.py +41 -32
  35. cribl_control_plane/models/inputcribl.py +11 -4
  36. cribl_control_plane/models/inputcriblhttp.py +23 -8
  37. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  38. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  39. cribl_control_plane/models/inputcribltcp.py +23 -8
  40. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  41. cribl_control_plane/models/inputdatadogagent.py +24 -8
  42. cribl_control_plane/models/inputdatagen.py +11 -4
  43. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  44. cribl_control_plane/models/inputelastic.py +40 -14
  45. cribl_control_plane/models/inputeventhub.py +15 -6
  46. cribl_control_plane/models/inputexec.py +14 -6
  47. cribl_control_plane/models/inputfile.py +15 -6
  48. cribl_control_plane/models/inputfirehose.py +23 -8
  49. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  50. cribl_control_plane/models/inputgrafana.py +67 -24
  51. cribl_control_plane/models/inputhttp.py +23 -8
  52. cribl_control_plane/models/inputhttpraw.py +23 -8
  53. cribl_control_plane/models/inputjournalfiles.py +12 -4
  54. cribl_control_plane/models/inputkafka.py +41 -28
  55. cribl_control_plane/models/inputkinesis.py +38 -14
  56. cribl_control_plane/models/inputkubeevents.py +11 -4
  57. cribl_control_plane/models/inputkubelogs.py +16 -8
  58. cribl_control_plane/models/inputkubemetrics.py +16 -8
  59. cribl_control_plane/models/inputloki.py +29 -10
  60. cribl_control_plane/models/inputmetrics.py +23 -8
  61. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  62. cribl_control_plane/models/inputmsk.py +48 -30
  63. cribl_control_plane/models/inputnetflow.py +11 -4
  64. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  65. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  66. cribl_control_plane/models/inputoffice365service.py +35 -16
  67. cribl_control_plane/models/inputopentelemetry.py +38 -16
  68. cribl_control_plane/models/inputprometheus.py +50 -18
  69. cribl_control_plane/models/inputprometheusrw.py +30 -10
  70. cribl_control_plane/models/inputrawudp.py +11 -4
  71. cribl_control_plane/models/inputs3.py +21 -8
  72. cribl_control_plane/models/inputs3inventory.py +26 -10
  73. cribl_control_plane/models/inputsecuritylake.py +27 -10
  74. cribl_control_plane/models/inputsnmp.py +16 -6
  75. cribl_control_plane/models/inputsplunk.py +33 -12
  76. cribl_control_plane/models/inputsplunkhec.py +29 -10
  77. cribl_control_plane/models/inputsplunksearch.py +33 -14
  78. cribl_control_plane/models/inputsqs.py +27 -10
  79. cribl_control_plane/models/inputsyslog.py +43 -16
  80. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  81. cribl_control_plane/models/inputsystemstate.py +16 -8
  82. cribl_control_plane/models/inputtcp.py +29 -10
  83. cribl_control_plane/models/inputtcpjson.py +29 -10
  84. cribl_control_plane/models/inputwef.py +37 -14
  85. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  86. cribl_control_plane/models/inputwineventlogs.py +20 -10
  87. cribl_control_plane/models/inputwiz.py +21 -8
  88. cribl_control_plane/models/inputwizwebhook.py +23 -8
  89. cribl_control_plane/models/inputzscalerhec.py +29 -10
  90. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  91. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  92. cribl_control_plane/models/masterworkerentry.py +7 -2
  93. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  94. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  95. cribl_control_plane/models/nodeprovidedinfo.py +3 -0
  96. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  97. cribl_control_plane/models/nodeupgradestate.py +2 -1
  98. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  99. cribl_control_plane/models/output.py +84 -79
  100. cribl_control_plane/models/outputazureblob.py +48 -18
  101. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  102. cribl_control_plane/models/outputazureeventhub.py +40 -18
  103. cribl_control_plane/models/outputazurelogs.py +35 -12
  104. cribl_control_plane/models/outputclickhouse.py +55 -20
  105. cribl_control_plane/models/outputcloudwatch.py +29 -10
  106. cribl_control_plane/models/outputconfluentcloud.py +71 -44
  107. cribl_control_plane/models/outputcriblhttp.py +44 -16
  108. cribl_control_plane/models/outputcribllake.py +46 -16
  109. cribl_control_plane/models/outputcribltcp.py +45 -18
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  111. cribl_control_plane/models/outputdatabricks.py +282 -0
  112. cribl_control_plane/models/outputdatadog.py +48 -20
  113. cribl_control_plane/models/outputdataset.py +46 -18
  114. cribl_control_plane/models/outputdiskspool.py +7 -2
  115. cribl_control_plane/models/outputdls3.py +68 -24
  116. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  117. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  118. cribl_control_plane/models/outputelastic.py +43 -18
  119. cribl_control_plane/models/outputelasticcloud.py +36 -12
  120. cribl_control_plane/models/outputexabeam.py +29 -10
  121. cribl_control_plane/models/outputfilesystem.py +39 -14
  122. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  123. cribl_control_plane/models/outputgooglecloudlogging.py +50 -18
  124. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  125. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  126. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  127. cribl_control_plane/models/outputgraphite.py +31 -14
  128. cribl_control_plane/models/outputhoneycomb.py +35 -12
  129. cribl_control_plane/models/outputhumiohec.py +43 -16
  130. cribl_control_plane/models/outputinfluxdb.py +42 -16
  131. cribl_control_plane/models/outputkafka.py +69 -40
  132. cribl_control_plane/models/outputkinesis.py +40 -16
  133. cribl_control_plane/models/outputloki.py +41 -16
  134. cribl_control_plane/models/outputminio.py +65 -24
  135. cribl_control_plane/models/outputmsk.py +77 -42
  136. cribl_control_plane/models/outputnewrelic.py +43 -18
  137. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  138. cribl_control_plane/models/outputopentelemetry.py +67 -26
  139. cribl_control_plane/models/outputprometheus.py +35 -12
  140. cribl_control_plane/models/outputring.py +19 -8
  141. cribl_control_plane/models/outputs3.py +68 -26
  142. cribl_control_plane/models/outputsecuritylake.py +52 -18
  143. cribl_control_plane/models/outputsentinel.py +45 -18
  144. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  145. cribl_control_plane/models/outputservicenow.py +60 -24
  146. cribl_control_plane/models/outputsignalfx.py +37 -14
  147. cribl_control_plane/models/outputsns.py +36 -14
  148. cribl_control_plane/models/outputsplunk.py +60 -24
  149. cribl_control_plane/models/outputsplunkhec.py +35 -12
  150. cribl_control_plane/models/outputsplunklb.py +77 -30
  151. cribl_control_plane/models/outputsqs.py +41 -16
  152. cribl_control_plane/models/outputstatsd.py +30 -14
  153. cribl_control_plane/models/outputstatsdext.py +29 -12
  154. cribl_control_plane/models/outputsumologic.py +35 -12
  155. cribl_control_plane/models/outputsyslog.py +58 -24
  156. cribl_control_plane/models/outputtcpjson.py +52 -20
  157. cribl_control_plane/models/outputwavefront.py +35 -12
  158. cribl_control_plane/models/outputwebhook.py +58 -22
  159. cribl_control_plane/models/outputxsiam.py +35 -14
  160. cribl_control_plane/models/productscore.py +2 -1
  161. cribl_control_plane/models/rbacresource.py +2 -1
  162. cribl_control_plane/models/resourcepolicy.py +4 -2
  163. cribl_control_plane/models/routeconf.py +3 -4
  164. cribl_control_plane/models/runnablejobcollection.py +30 -13
  165. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +6 -5
  170. cribl_control_plane/models/workertypes.py +2 -1
  171. {cribl_control_plane-0.0.50.dist-info → cribl_control_plane-0.0.50rc2.dist-info}/METADATA +1 -1
  172. cribl_control_plane-0.0.50rc2.dist-info/RECORD +327 -0
  173. cribl_control_plane/models/appmode.py +0 -13
  174. cribl_control_plane/models/routecloneconf.py +0 -13
  175. cribl_control_plane-0.0.50.dist-info/RECORD +0 -325
  176. {cribl_control_plane-0.0.50.dist-info → cribl_control_plane-0.0.50rc2.dist-info}/WHEEL +0 -0
cribl_control_plane/models/input.py

@@ -66,8 +66,10 @@ from .inputwineventlogs import InputWinEventLogs, InputWinEventLogsTypedDict
 from .inputwiz import InputWiz, InputWizTypedDict
 from .inputwizwebhook import InputWizWebhook, InputWizWebhookTypedDict
 from .inputzscalerhec import InputZscalerHec, InputZscalerHecTypedDict
+from cribl_control_plane.utils import get_discriminator
+from pydantic import Discriminator, Tag
 from typing import Union
-from typing_extensions import TypeAliasType
+from typing_extensions import Annotated, TypeAliasType


 InputTypedDict = TypeAliasType(
@@ -136,67 +138,67 @@ InputTypedDict = TypeAliasType(
 )


-Input = TypeAliasType(
-    "Input",
+Input = Annotated[
     Union[
-        InputDatagen,
-        InputKubeEvents,
-        InputCribl,
-        InputCriblmetrics,
-        InputKubeMetrics,
-        InputCollection,
-        InputSystemState,
-        InputModelDrivenTelemetry,
-        InputSystemMetrics,
-        InputWindowsMetrics,
-        InputJournalFiles,
-        InputWinEventLogs,
-        InputRawUDP,
-        InputExec,
-        InputKubeLogs,
-        InputMetrics,
-        InputSnmp,
-        InputCriblTCP,
-        InputNetflow,
-        InputTcpjson,
-        InputGooglePubsub,
-        InputOffice365Service,
-        InputWiz,
-        InputTCP,
-        InputFirehose,
-        InputCriblHTTP,
-        InputDatadogAgent,
-        InputOffice365Mgmt,
-        InputFile,
-        InputSplunk,
-        InputWef,
-        InputAppscope,
-        InputHTTPRaw,
-        InputWizWebhook,
-        InputHTTP,
-        InputCriblLakeHTTP,
-        InputAzureBlob,
-        InputSqs,
-        InputZscalerHec,
-        InputKinesis,
-        InputConfluentCloud,
-        InputEventhub,
-        InputKafka,
-        InputElastic,
-        InputOffice365MsgTrace,
-        InputSplunkHec,
-        InputLoki,
-        InputPrometheusRw,
-        InputPrometheus,
-        InputCrowdstrike,
-        InputEdgePrometheus,
-        InputOpenTelemetry,
-        InputS3,
-        InputSecurityLake,
-        InputMsk,
-        InputS3Inventory,
-        InputSplunkSearch,
-        InputSyslog,
-        InputGrafana,
+        Annotated[InputCollection, Tag("collection")],
+        Annotated[InputKafka, Tag("kafka")],
+        Annotated[InputMsk, Tag("msk")],
+        Annotated[InputHTTP, Tag("http")],
+        Annotated[InputSplunk, Tag("splunk")],
+        Annotated[InputSplunkSearch, Tag("splunk_search")],
+        Annotated[InputSplunkHec, Tag("splunk_hec")],
+        Annotated[InputAzureBlob, Tag("azure_blob")],
+        Annotated[InputElastic, Tag("elastic")],
+        Annotated[InputConfluentCloud, Tag("confluent_cloud")],
+        Annotated[InputGrafana, Tag("grafana")],
+        Annotated[InputLoki, Tag("loki")],
+        Annotated[InputPrometheusRw, Tag("prometheus_rw")],
+        Annotated[InputPrometheus, Tag("prometheus")],
+        Annotated[InputEdgePrometheus, Tag("edge_prometheus")],
+        Annotated[InputOffice365Mgmt, Tag("office365_mgmt")],
+        Annotated[InputOffice365Service, Tag("office365_service")],
+        Annotated[InputOffice365MsgTrace, Tag("office365_msg_trace")],
+        Annotated[InputEventhub, Tag("eventhub")],
+        Annotated[InputExec, Tag("exec")],
+        Annotated[InputFirehose, Tag("firehose")],
+        Annotated[InputGooglePubsub, Tag("google_pubsub")],
+        Annotated[InputCribl, Tag("cribl")],
+        Annotated[InputCriblTCP, Tag("cribl_tcp")],
+        Annotated[InputCriblHTTP, Tag("cribl_http")],
+        Annotated[InputCriblLakeHTTP, Tag("cribl_lake_http")],
+        Annotated[InputTcpjson, Tag("tcpjson")],
+        Annotated[InputSystemMetrics, Tag("system_metrics")],
+        Annotated[InputSystemState, Tag("system_state")],
+        Annotated[InputKubeMetrics, Tag("kube_metrics")],
+        Annotated[InputKubeLogs, Tag("kube_logs")],
+        Annotated[InputKubeEvents, Tag("kube_events")],
+        Annotated[InputWindowsMetrics, Tag("windows_metrics")],
+        Annotated[InputCrowdstrike, Tag("crowdstrike")],
+        Annotated[InputDatadogAgent, Tag("datadog_agent")],
+        Annotated[InputDatagen, Tag("datagen")],
+        Annotated[InputHTTPRaw, Tag("http_raw")],
+        Annotated[InputKinesis, Tag("kinesis")],
+        Annotated[InputCriblmetrics, Tag("criblmetrics")],
+        Annotated[InputMetrics, Tag("metrics")],
+        Annotated[InputS3, Tag("s3")],
+        Annotated[InputS3Inventory, Tag("s3_inventory")],
+        Annotated[InputSnmp, Tag("snmp")],
+        Annotated[InputOpenTelemetry, Tag("open_telemetry")],
+        Annotated[InputModelDrivenTelemetry, Tag("model_driven_telemetry")],
+        Annotated[InputSqs, Tag("sqs")],
+        Annotated[InputSyslog, Tag("syslog")],
+        Annotated[InputFile, Tag("file")],
+        Annotated[InputTCP, Tag("tcp")],
+        Annotated[InputAppscope, Tag("appscope")],
+        Annotated[InputWef, Tag("wef")],
+        Annotated[InputWinEventLogs, Tag("win_event_logs")],
+        Annotated[InputRawUDP, Tag("raw_udp")],
+        Annotated[InputJournalFiles, Tag("journal_files")],
+        Annotated[InputWiz, Tag("wiz")],
+        Annotated[InputWizWebhook, Tag("wiz_webhook")],
+        Annotated[InputNetflow, Tag("netflow")],
+        Annotated[InputSecurityLake, Tag("security_lake")],
+        Annotated[InputZscalerHec, Tag("zscaler_hec")],
     ],
-)
+    Discriminator(lambda m: get_discriminator(m, "type", "type")),
+]
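
The change above swaps the TypeAliasType-wrapped plain Union for a pydantic v2 tagged (discriminated) union: each input model is wrapped in Annotated[..., Tag("<type>")] and a callable Discriminator reads the payload's type field, so validation dispatches straight to the matching model instead of attempting all 59 members in turn and reporting one error per variant. A minimal sketch of the same pattern, using hypothetical TcpInput/FileInput models and a by_type helper rather than the SDK's generated classes and get_discriminator utility:

# Minimal sketch of the tagged-union pattern shown above (pydantic v2).
# TcpInput, FileInput, and by_type are illustrative stand-ins, not SDK code.
from typing import Optional, Union

from pydantic import BaseModel, Discriminator, Tag, TypeAdapter
from typing_extensions import Annotated


class TcpInput(BaseModel):
    type: str
    port: int


class FileInput(BaseModel):
    type: str
    path: str


def by_type(value: object) -> Optional[str]:
    # The discriminator sees raw dicts during validation and model
    # instances during serialization, so handle both shapes.
    if isinstance(value, dict):
        return value.get("type")
    return getattr(value, "type", None)


Input = Annotated[
    Union[
        Annotated[TcpInput, Tag("tcp")],
        Annotated[FileInput, Tag("file")],
    ],
    Discriminator(by_type),
]

adapter = TypeAdapter(Input)
event = adapter.validate_python({"type": "file", "path": "/var/log/app.log"})
print(type(event).__name__)  # FileInput

With the tagged union, a payload whose type matches none of the tags fails fast with a single tag error naming the expected values, rather than a stack of per-variant validation errors.
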
cribl_control_plane/models/inputappscope.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputAppscopeConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputAppscopeMode(str, Enum):
+class InputAppscopeMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputAppscopeCompression(str, Enum):
+class InputAppscopeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputAppscopePqTypedDict(TypedDict):


 class InputAppscopePq(BaseModel):
-    mode: Optional[InputAppscopeMode] = InputAppscopeMode.ALWAYS
+    mode: Annotated[
+        Optional[InputAppscopeMode], PlainValidator(validate_open_enum(False))
+    ] = InputAppscopeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputAppscopePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputAppscopeCompression] = InputAppscopeCompression.NONE
+    compress: Annotated[
+        Optional[InputAppscopeCompression], PlainValidator(validate_open_enum(False))
+    ] = InputAppscopeCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -144,7 +151,7 @@ class InputAppscopeFilter(BaseModel):
     r"""To override the UNIX domain socket or address/port specified in General Settings (while leaving Authentication settings as is), enter a URL."""


-class InputAppscopeDataCompressionFormat(str, Enum):
+class InputAppscopeDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     NONE = "none"
     GZIP = "gzip"

@@ -176,9 +183,10 @@ class InputAppscopePersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""

-    compress: Optional[InputAppscopeDataCompressionFormat] = (
-        InputAppscopeDataCompressionFormat.GZIP
-    )
+    compress: Annotated[
+        Optional[InputAppscopeDataCompressionFormat],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputAppscopeDataCompressionFormat.GZIP

     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/appscope"
@@ -186,21 +194,21 @@ class InputAppscopePersistence(BaseModel):
     r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/appscope"""


-class InputAppscopeAuthenticationMethod(str, Enum):
+class InputAppscopeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

     MANUAL = "manual"
     SECRET = "secret"


-class InputAppscopeMinimumTLSVersion(str, Enum):
+class InputAppscopeMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputAppscopeMaximumTLSVersion(str, Enum):
+class InputAppscopeMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -259,11 +267,19 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Optional[InputAppscopeMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[InputAppscopeMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[InputAppscopeMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[InputAppscopeMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None


@@ -410,7 +426,11 @@ class InputAppscope(BaseModel):
    persistence: Optional[InputAppscopePersistence] = None

     auth_type: Annotated[
-        Optional[InputAppscopeAuthenticationMethod], pydantic.Field(alias="authType")
+        Annotated[
+            Optional[InputAppscopeAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
     ] = InputAppscopeAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

cribl_control_plane/models/inputazureblob.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputAzureBlobConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputAzureBlobMode(str, Enum):
+class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputAzureBlobCompression(str, Enum):
+class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputAzureBlobPqTypedDict(TypedDict):


 class InputAzureBlobPq(BaseModel):
-    mode: Optional[InputAzureBlobMode] = InputAzureBlobMode.ALWAYS
+    mode: Annotated[
+        Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
+    ] = InputAzureBlobMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputAzureBlobPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputAzureBlobCompression] = InputAzureBlobCompression.NONE
+    compress: Annotated[
+        Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
+    ] = InputAzureBlobCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -109,7 +116,7 @@ class InputAzureBlobMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputAzureBlobAuthenticationMethod(str, Enum):
+class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     MANUAL = "manual"
     SECRET = "secret"
     CLIENT_SECRET = "clientSecret"
@@ -270,7 +277,11 @@ class InputAzureBlob(BaseModel):
     r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""

     auth_type: Annotated[
-        Optional[InputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
+        Annotated[
+            Optional[InputAzureBlobAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
     ] = InputAzureBlobAuthenticationMethod.MANUAL

     description: Optional[str] = None
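
The pattern repeated in inputappscope.py, inputazureblob.py, and the remaining input/output modules is the same: generated enums gain metaclass=utils.OpenEnumMeta, and each enum-typed field is wrapped in PlainValidator(validate_open_enum(False)), turning them into "open" enums. The apparent intent is that a value the SDK does not know yet is kept as a plain string instead of raising a ValidationError, so an older SDK build can still parse configs that use enum values added by a newer Cribl release. A rough sketch of how such an open-enum field behaves in pydantic v2, with an illustrative open_enum helper standing in for the SDK's validate_open_enum:

# Sketch of an "open enum" field (pydantic v2): known values become enum
# members, unknown values pass through as plain strings. open_enum() is an
# illustrative stand-in for the SDK's validate_open_enum helper.
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def open_enum(value: object) -> Union[Compression, str, None]:
    # PlainValidator replaces the field's normal validation entirely, so
    # whatever this function returns is stored on the model as-is.
    if value is None or isinstance(value, Compression):
        return value
    try:
        return Compression(value)
    except ValueError:
        return str(value)


class Pq(BaseModel):
    compress: Annotated[Optional[Compression], PlainValidator(open_enum)] = Compression.NONE


print(Pq(compress="gzip").compress)  # Compression.GZIP
print(Pq(compress="zstd").compress)  # 'zstd' -- kept rather than rejected
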
cribl_control_plane/models/inputcollection.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputCollectionConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputCollectionMode(str, Enum):
+class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputCollectionCompression(str, Enum):
+class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputCollectionPqTypedDict(TypedDict):


 class InputCollectionPq(BaseModel):
-    mode: Optional[InputCollectionMode] = InputCollectionMode.ALWAYS
+    mode: Annotated[
+        Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
+    ] = InputCollectionMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputCollectionPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputCollectionCompression] = InputCollectionCompression.NONE
+    compress: Annotated[
+        Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
+    ] = InputCollectionCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
cribl_control_plane/models/inputconfluentcloud.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputConfluentCloudConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputConfluentCloudMode(str, Enum):
+class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputConfluentCloudCompression(str, Enum):
+class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputConfluentCloudPqTypedDict(TypedDict):


 class InputConfluentCloudPq(BaseModel):
-    mode: Optional[InputConfluentCloudMode] = InputConfluentCloudMode.ALWAYS
+    mode: Annotated[
+        Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
+    ] = InputConfluentCloudMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,9 +93,10 @@ class InputConfluentCloudPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputConfluentCloudCompression] = (
-        InputConfluentCloudCompression.NONE
-    )
+    compress: Annotated[
+        Optional[InputConfluentCloudCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputConfluentCloudCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -98,14 +104,14 @@ class InputConfluentCloudPq(BaseModel):
     ] = None


-class InputConfluentCloudMinimumTLSVersion(str, Enum):
+class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputConfluentCloudMaximumTLSVersion(str, Enum):
+class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -165,23 +171,22 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Optional[InputConfluentCloudMinimumTLSVersion],
+        Annotated[
+            Optional[InputConfluentCloudMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[InputConfluentCloudMaximumTLSVersion],
+        Annotated[
+            Optional[InputConfluentCloudMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None


-class InputConfluentCloudSchemaType(str, Enum):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -201,14 +206,18 @@ class InputConfluentCloudAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""


-class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -268,12 +277,18 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+        Annotated[
+            Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+        Annotated[
+            Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None

@@ -282,8 +297,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputConfluentCloudSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -305,11 +318,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Optional[InputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
-    ] = InputConfluentCloudSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -329,7 +337,7 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None


-class InputConfluentCloudSASLMechanism(str, Enum):
+class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -350,9 +358,10 @@ class InputConfluentCloudAuthentication(BaseModel):

     disabled: Optional[bool] = True

-    mechanism: Optional[InputConfluentCloudSASLMechanism] = (
-        InputConfluentCloudSASLMechanism.PLAIN
-    )
+    mechanism: Annotated[
+        Optional[InputConfluentCloudSASLMechanism],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputConfluentCloudSASLMechanism.PLAIN

     oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
         False
cribl_control_plane/models/inputcribl.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputCriblConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputCriblMode(str, Enum):
+class InputCriblMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputCriblCompression(str, Enum):
+class InputCriblCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputCriblPqTypedDict(TypedDict):


 class InputCriblPq(BaseModel):
-    mode: Optional[InputCriblMode] = InputCriblMode.ALWAYS
+    mode: Annotated[
+        Optional[InputCriblMode], PlainValidator(validate_open_enum(False))
+    ] = InputCriblMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputCriblPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputCriblCompression] = InputCriblCompression.NONE
+    compress: Annotated[
+        Optional[InputCriblCompression], PlainValidator(validate_open_enum(False))
+    ] = InputCriblCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[