cribl-control-plane 0.0.49__py3-none-any.whl → 0.1.0a1__py3-none-any.whl

This diff shows the changes between these publicly released package versions as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (166)
  1. cribl_control_plane/_version.py +4 -6
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/health.py +6 -2
  4. cribl_control_plane/models/__init__.py +18 -3
  5. cribl_control_plane/models/appmode.py +2 -1
  6. cribl_control_plane/models/cacheconnection.py +10 -2
  7. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  8. cribl_control_plane/models/cloudprovider.py +2 -1
  9. cribl_control_plane/models/configgroup.py +7 -2
  10. cribl_control_plane/models/configgroupcloud.py +6 -2
  11. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  12. cribl_control_plane/models/createinputhectokenbyidop.py +6 -5
  13. cribl_control_plane/models/createversionpushop.py +5 -5
  14. cribl_control_plane/models/cribllakedataset.py +8 -2
  15. cribl_control_plane/models/datasetmetadata.py +8 -2
  16. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  17. cribl_control_plane/models/error.py +16 -0
  18. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  19. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  20. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  21. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  22. cribl_control_plane/models/gethealthinfoop.py +17 -0
  23. cribl_control_plane/models/getsummaryop.py +7 -2
  24. cribl_control_plane/models/getversionshowop.py +6 -5
  25. cribl_control_plane/models/gitshowresult.py +19 -0
  26. cribl_control_plane/models/hbcriblinfo.py +6 -1
  27. cribl_control_plane/models/healthstatus.py +7 -4
  28. cribl_control_plane/models/inputappscope.py +34 -14
  29. cribl_control_plane/models/inputazureblob.py +17 -6
  30. cribl_control_plane/models/inputcollection.py +11 -4
  31. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  32. cribl_control_plane/models/inputcribl.py +11 -4
  33. cribl_control_plane/models/inputcriblhttp.py +23 -8
  34. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  35. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  36. cribl_control_plane/models/inputcribltcp.py +23 -8
  37. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  38. cribl_control_plane/models/inputdatadogagent.py +24 -8
  39. cribl_control_plane/models/inputdatagen.py +11 -4
  40. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  41. cribl_control_plane/models/inputelastic.py +40 -14
  42. cribl_control_plane/models/inputeventhub.py +15 -6
  43. cribl_control_plane/models/inputexec.py +14 -6
  44. cribl_control_plane/models/inputfile.py +15 -6
  45. cribl_control_plane/models/inputfirehose.py +23 -8
  46. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  47. cribl_control_plane/models/inputgrafana.py +67 -24
  48. cribl_control_plane/models/inputhttp.py +23 -8
  49. cribl_control_plane/models/inputhttpraw.py +23 -8
  50. cribl_control_plane/models/inputjournalfiles.py +12 -4
  51. cribl_control_plane/models/inputkafka.py +46 -16
  52. cribl_control_plane/models/inputkinesis.py +38 -14
  53. cribl_control_plane/models/inputkubeevents.py +11 -4
  54. cribl_control_plane/models/inputkubelogs.py +16 -8
  55. cribl_control_plane/models/inputkubemetrics.py +16 -8
  56. cribl_control_plane/models/inputloki.py +29 -10
  57. cribl_control_plane/models/inputmetrics.py +23 -8
  58. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  59. cribl_control_plane/models/inputmsk.py +53 -18
  60. cribl_control_plane/models/inputnetflow.py +11 -4
  61. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  62. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  63. cribl_control_plane/models/inputoffice365service.py +35 -16
  64. cribl_control_plane/models/inputopentelemetry.py +38 -16
  65. cribl_control_plane/models/inputprometheus.py +50 -18
  66. cribl_control_plane/models/inputprometheusrw.py +30 -10
  67. cribl_control_plane/models/inputrawudp.py +11 -4
  68. cribl_control_plane/models/inputs3.py +21 -8
  69. cribl_control_plane/models/inputs3inventory.py +26 -10
  70. cribl_control_plane/models/inputsecuritylake.py +27 -10
  71. cribl_control_plane/models/inputsnmp.py +16 -6
  72. cribl_control_plane/models/inputsplunk.py +33 -12
  73. cribl_control_plane/models/inputsplunkhec.py +29 -10
  74. cribl_control_plane/models/inputsplunksearch.py +33 -14
  75. cribl_control_plane/models/inputsqs.py +27 -10
  76. cribl_control_plane/models/inputsyslog.py +43 -16
  77. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  78. cribl_control_plane/models/inputsystemstate.py +16 -8
  79. cribl_control_plane/models/inputtcp.py +29 -10
  80. cribl_control_plane/models/inputtcpjson.py +29 -10
  81. cribl_control_plane/models/inputwef.py +37 -14
  82. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  83. cribl_control_plane/models/inputwineventlogs.py +20 -10
  84. cribl_control_plane/models/inputwiz.py +21 -8
  85. cribl_control_plane/models/inputwizwebhook.py +23 -8
  86. cribl_control_plane/models/inputzscalerhec.py +29 -10
  87. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  88. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  89. cribl_control_plane/models/masterworkerentry.py +7 -2
  90. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  91. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  92. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  93. cribl_control_plane/models/nodeupgradestate.py +2 -1
  94. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  95. cribl_control_plane/models/outputazureblob.py +48 -18
  96. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  97. cribl_control_plane/models/outputazureeventhub.py +40 -18
  98. cribl_control_plane/models/outputazurelogs.py +35 -12
  99. cribl_control_plane/models/outputclickhouse.py +55 -20
  100. cribl_control_plane/models/outputcloudwatch.py +29 -10
  101. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  102. cribl_control_plane/models/outputcriblhttp.py +44 -16
  103. cribl_control_plane/models/outputcribllake.py +46 -16
  104. cribl_control_plane/models/outputcribltcp.py +45 -18
  105. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  106. cribl_control_plane/models/outputdatadog.py +48 -20
  107. cribl_control_plane/models/outputdataset.py +46 -18
  108. cribl_control_plane/models/outputdiskspool.py +7 -2
  109. cribl_control_plane/models/outputdls3.py +68 -24
  110. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  111. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  112. cribl_control_plane/models/outputelastic.py +43 -18
  113. cribl_control_plane/models/outputelasticcloud.py +36 -12
  114. cribl_control_plane/models/outputexabeam.py +29 -10
  115. cribl_control_plane/models/outputfilesystem.py +39 -14
  116. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  117. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  118. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  119. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  120. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  121. cribl_control_plane/models/outputgraphite.py +31 -14
  122. cribl_control_plane/models/outputhoneycomb.py +35 -12
  123. cribl_control_plane/models/outputhumiohec.py +43 -16
  124. cribl_control_plane/models/outputinfluxdb.py +42 -16
  125. cribl_control_plane/models/outputkafka.py +74 -28
  126. cribl_control_plane/models/outputkinesis.py +40 -16
  127. cribl_control_plane/models/outputloki.py +41 -16
  128. cribl_control_plane/models/outputminio.py +65 -24
  129. cribl_control_plane/models/outputmsk.py +82 -30
  130. cribl_control_plane/models/outputnewrelic.py +43 -18
  131. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  132. cribl_control_plane/models/outputopentelemetry.py +67 -26
  133. cribl_control_plane/models/outputprometheus.py +35 -12
  134. cribl_control_plane/models/outputring.py +19 -8
  135. cribl_control_plane/models/outputs3.py +68 -26
  136. cribl_control_plane/models/outputsecuritylake.py +52 -18
  137. cribl_control_plane/models/outputsentinel.py +45 -18
  138. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  139. cribl_control_plane/models/outputservicenow.py +60 -24
  140. cribl_control_plane/models/outputsignalfx.py +37 -14
  141. cribl_control_plane/models/outputsns.py +36 -14
  142. cribl_control_plane/models/outputsplunk.py +60 -24
  143. cribl_control_plane/models/outputsplunkhec.py +35 -12
  144. cribl_control_plane/models/outputsplunklb.py +77 -30
  145. cribl_control_plane/models/outputsqs.py +41 -16
  146. cribl_control_plane/models/outputstatsd.py +30 -14
  147. cribl_control_plane/models/outputstatsdext.py +29 -12
  148. cribl_control_plane/models/outputsumologic.py +35 -12
  149. cribl_control_plane/models/outputsyslog.py +58 -24
  150. cribl_control_plane/models/outputtcpjson.py +52 -20
  151. cribl_control_plane/models/outputwavefront.py +35 -12
  152. cribl_control_plane/models/outputwebhook.py +58 -22
  153. cribl_control_plane/models/outputxsiam.py +35 -14
  154. cribl_control_plane/models/productscore.py +2 -1
  155. cribl_control_plane/models/rbacresource.py +2 -1
  156. cribl_control_plane/models/resourcepolicy.py +4 -2
  157. cribl_control_plane/models/runnablejobcollection.py +30 -13
  158. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  159. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  160. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  161. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  162. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +6 -5
  163. cribl_control_plane/models/workertypes.py +2 -1
  164. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/METADATA +1 -1
  165. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/RECORD +166 -163
  166. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/WHEEL +0 -0

cribl_control_plane/models/getversionshowop.py

@@ -1,10 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .gitshowresult import GitShowResult, GitShowResultTypedDict
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import FieldMetadata, QueryParamMetadata
  import pydantic
- from typing import Any, Dict, List, Optional
+ from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict


@@ -48,17 +49,17 @@ class GetVersionShowRequest(BaseModel):


  class GetVersionShowResponseTypedDict(TypedDict):
-     r"""a list of any objects"""
+     r"""a list of GitShowResult objects"""

      count: NotRequired[int]
      r"""number of items present in the items array"""
-     items: NotRequired[List[Dict[str, Any]]]
+     items: NotRequired[List[GitShowResultTypedDict]]


  class GetVersionShowResponse(BaseModel):
-     r"""a list of any objects"""
+     r"""a list of GitShowResult objects"""

      count: Optional[int] = None
      r"""number of items present in the items array"""

-     items: Optional[List[Dict[str, Any]]] = None
+     items: Optional[List[GitShowResult]] = None

cribl_control_plane/models/gitshowresult.py (new file)

@@ -0,0 +1,19 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .difffiles import DiffFiles, DiffFilesTypedDict
+ from cribl_control_plane.types import BaseModel
+ import pydantic
+ from typing import List
+ from typing_extensions import Annotated, TypedDict
+
+
+ class GitShowResultTypedDict(TypedDict):
+     commit_message: str
+     diff_json: List[DiffFilesTypedDict]
+
+
+ class GitShowResult(BaseModel):
+     commit_message: Annotated[str, pydantic.Field(alias="commitMessage")]
+
+     diff_json: Annotated[List[DiffFiles], pydantic.Field(alias="diffJson")]
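
With GitShowResult in place, the items array on GetVersionShowResponse is typed rather than a list of plain dicts, and the camelCase wire fields (commitMessage, diffJson) map onto snake_case attributes through pydantic aliases. A minimal stand-alone sketch of that shape, not the generated SDK code; DiffStub is a placeholder for the SDK's DiffFiles model, whose fields are not shown in this diff:

# Illustrative sketch only; DiffStub stands in for cribl_control_plane.models.DiffFiles.
from typing import List

import pydantic
from pydantic import BaseModel, ConfigDict
from typing_extensions import Annotated


class DiffStub(BaseModel):
    # Placeholder: accept whatever fields the real DiffFiles model defines.
    model_config = ConfigDict(extra="allow")


class GitShowResultSketch(BaseModel):
    commit_message: Annotated[str, pydantic.Field(alias="commitMessage")]
    diff_json: Annotated[List[DiffStub], pydantic.Field(alias="diffJson")]


payload = {"commitMessage": "Update pipeline config", "diffJson": [{"path": "pipelines/main.yml"}]}
item = GitShowResultSketch.model_validate(payload)
print(item.commit_message)  # attribute access replaces item["commitMessage"] dict lookups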

cribl_control_plane/models/hbcriblinfo.py

@@ -5,7 +5,9 @@ from .appmode import AppMode
  from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
  from .lookupversions import LookupVersions, LookupVersionsTypedDict
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -54,7 +56,10 @@ class HBCriblInfoTypedDict(TypedDict):
  class HBCriblInfo(BaseModel):
      config: Config

-     dist_mode: Annotated[AppMode, pydantic.Field(alias="distMode")]
+     dist_mode: Annotated[
+         Annotated[AppMode, PlainValidator(validate_open_enum(False))],
+         pydantic.Field(alias="distMode"),
+     ]

      group: str


cribl_control_plane/models/healthstatus.py

@@ -1,19 +1,22 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import Optional
  from typing_extensions import Annotated, NotRequired, TypedDict


- class Role(str, Enum):
+ class Role(str, Enum, metaclass=utils.OpenEnumMeta):
      PRIMARY = "primary"
      STANDBY = "standby"


- class Status(str, Enum):
+ class Status(str, Enum, metaclass=utils.OpenEnumMeta):
      HEALTHY = "healthy"
      SHUTTING_DOWN = "shutting down"
      STANDBY = "standby"
@@ -26,8 +29,8 @@ class HealthStatusTypedDict(TypedDict):


  class HealthStatus(BaseModel):
-     status: Status
+     status: Annotated[Status, PlainValidator(validate_open_enum(False))]

      start_time: Annotated[float, pydantic.Field(alias="startTime")]

-     role: Optional[Role] = None
+     role: Annotated[Optional[Role], PlainValidator(validate_open_enum(False))] = None
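
The recurring change in this release shows up here in its simplest form: Role and Status become open enums (str, Enum with metaclass=utils.OpenEnumMeta), and the fields that use them gain PlainValidator(validate_open_enum(False)), so status values introduced by a newer control plane no longer fail client-side validation. The same treatment is applied to the enum fields of every input and output model in the rest of this diff. A sketch of the observable behavior, using a hypothetical lenient_enum helper in place of the SDK's validate_open_enum:

# Sketch only: lenient_enum is an illustrative stand-in for the SDK's open-enum validator.
from enum import Enum
from typing import Union

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Status(str, Enum):
    HEALTHY = "healthy"
    SHUTTING_DOWN = "shutting down"
    STANDBY = "standby"


def lenient_enum(value: object) -> Union[Status, str]:
    # Accept known members; pass unrecognized values through as plain strings.
    try:
        return Status(value)
    except ValueError:
        return str(value)


class HealthStatusSketch(BaseModel):
    status: Annotated[Union[Status, str], PlainValidator(lenient_enum)]


print(HealthStatusSketch(status="healthy").status)   # Status.HEALTHY
print(HealthStatusSketch(status="degraded").status)  # "degraded", no ValidationError raised

With a closed enum, the second call would raise a pydantic ValidationError; keeping unknown values as strings lets an older SDK release keep working against a newer API.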

cribl_control_plane/models/inputappscope.py

@@ -1,9 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputAppscopeConnection(BaseModel):
      pipeline: Optional[str] = None


- class InputAppscopeMode(str, Enum):
+ class InputAppscopeMode(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      SMART = "smart"
      ALWAYS = "always"


- class InputAppscopeCompression(str, Enum):
+ class InputAppscopeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""

      NONE = "none"
@@ -64,7 +67,9 @@ class InputAppscopePqTypedDict(TypedDict):


  class InputAppscopePq(BaseModel):
-     mode: Optional[InputAppscopeMode] = InputAppscopeMode.ALWAYS
+     mode: Annotated[
+         Optional[InputAppscopeMode], PlainValidator(validate_open_enum(False))
+     ] = InputAppscopeMode.ALWAYS
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputAppscopePq(BaseModel):
      path: Optional[str] = "$CRIBL_HOME/state/queues"
      r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-     compress: Optional[InputAppscopeCompression] = InputAppscopeCompression.NONE
+     compress: Annotated[
+         Optional[InputAppscopeCompression], PlainValidator(validate_open_enum(False))
+     ] = InputAppscopeCompression.NONE
      r"""Codec to use to compress the persisted data"""

      pq_controls: Annotated[
@@ -144,7 +151,7 @@ class InputAppscopeFilter(BaseModel):
      r"""To override the UNIX domain socket or address/port specified in General Settings (while leaving Authentication settings as is), enter a URL."""


- class InputAppscopeDataCompressionFormat(str, Enum):
+ class InputAppscopeDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
      NONE = "none"
      GZIP = "gzip"

@@ -176,9 +183,10 @@ class InputAppscopePersistence(BaseModel):
      max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
      r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""

-     compress: Optional[InputAppscopeDataCompressionFormat] = (
-         InputAppscopeDataCompressionFormat.GZIP
-     )
+     compress: Annotated[
+         Optional[InputAppscopeDataCompressionFormat],
+         PlainValidator(validate_open_enum(False)),
+     ] = InputAppscopeDataCompressionFormat.GZIP

      dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
          "$CRIBL_HOME/state/appscope"
@@ -186,21 +194,21 @@ class InputAppscopePersistence(BaseModel):
      r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/appscope"""


- class InputAppscopeAuthenticationMethod(str, Enum):
+ class InputAppscopeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

      MANUAL = "manual"
      SECRET = "secret"


- class InputAppscopeMinimumTLSVersion(str, Enum):
+ class InputAppscopeMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
      TL_SV1_3 = "TLSv1.3"


- class InputAppscopeMaximumTLSVersion(str, Enum):
+ class InputAppscopeMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
@@ -259,11 +267,19 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
      ] = None

      min_version: Annotated[
-         Optional[InputAppscopeMinimumTLSVersion], pydantic.Field(alias="minVersion")
+         Annotated[
+             Optional[InputAppscopeMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="minVersion"),
      ] = None

      max_version: Annotated[
-         Optional[InputAppscopeMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+         Annotated[
+             Optional[InputAppscopeMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="maxVersion"),
      ] = None


@@ -410,7 +426,11 @@ class InputAppscope(BaseModel):
      persistence: Optional[InputAppscopePersistence] = None

      auth_type: Annotated[
-         Optional[InputAppscopeAuthenticationMethod], pydantic.Field(alias="authType")
+         Annotated[
+             Optional[InputAppscopeAuthenticationMethod],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="authType"),
      ] = InputAppscopeAuthenticationMethod.MANUAL
      r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

cribl_control_plane/models/inputazureblob.py

@@ -1,9 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputAzureBlobConnection(BaseModel):
      pipeline: Optional[str] = None


- class InputAzureBlobMode(str, Enum):
+ class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      SMART = "smart"
      ALWAYS = "always"


- class InputAzureBlobCompression(str, Enum):
+ class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""

      NONE = "none"
@@ -64,7 +67,9 @@ class InputAzureBlobPqTypedDict(TypedDict):


  class InputAzureBlobPq(BaseModel):
-     mode: Optional[InputAzureBlobMode] = InputAzureBlobMode.ALWAYS
+     mode: Annotated[
+         Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
+     ] = InputAzureBlobMode.ALWAYS
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputAzureBlobPq(BaseModel):
      path: Optional[str] = "$CRIBL_HOME/state/queues"
      r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-     compress: Optional[InputAzureBlobCompression] = InputAzureBlobCompression.NONE
+     compress: Annotated[
+         Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
+     ] = InputAzureBlobCompression.NONE
      r"""Codec to use to compress the persisted data"""

      pq_controls: Annotated[
@@ -109,7 +116,7 @@ class InputAzureBlobMetadatum(BaseModel):
      r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


- class InputAzureBlobAuthenticationMethod(str, Enum):
+ class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
      MANUAL = "manual"
      SECRET = "secret"
      CLIENT_SECRET = "clientSecret"
@@ -270,7 +277,11 @@ class InputAzureBlob(BaseModel):
      r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""

      auth_type: Annotated[
-         Optional[InputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
+         Annotated[
+             Optional[InputAzureBlobAuthenticationMethod],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="authType"),
      ] = InputAzureBlobAuthenticationMethod.MANUAL

      description: Optional[str] = None

cribl_control_plane/models/inputcollection.py

@@ -1,9 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputCollectionConnection(BaseModel):
      pipeline: Optional[str] = None


- class InputCollectionMode(str, Enum):
+ class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      SMART = "smart"
      ALWAYS = "always"


- class InputCollectionCompression(str, Enum):
+ class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""

      NONE = "none"
@@ -64,7 +67,9 @@ class InputCollectionPqTypedDict(TypedDict):


  class InputCollectionPq(BaseModel):
-     mode: Optional[InputCollectionMode] = InputCollectionMode.ALWAYS
+     mode: Annotated[
+         Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
+     ] = InputCollectionMode.ALWAYS
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputCollectionPq(BaseModel):
      path: Optional[str] = "$CRIBL_HOME/state/queues"
      r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-     compress: Optional[InputCollectionCompression] = InputCollectionCompression.NONE
+     compress: Annotated[
+         Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
+     ] = InputCollectionCompression.NONE
      r"""Codec to use to compress the persisted data"""

      pq_controls: Annotated[

cribl_control_plane/models/inputconfluentcloud.py

@@ -1,9 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputConfluentCloudConnection(BaseModel):
      pipeline: Optional[str] = None


- class InputConfluentCloudMode(str, Enum):
+ class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      SMART = "smart"
      ALWAYS = "always"


- class InputConfluentCloudCompression(str, Enum):
+ class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""

      NONE = "none"
@@ -64,7 +67,9 @@ class InputConfluentCloudPqTypedDict(TypedDict):


  class InputConfluentCloudPq(BaseModel):
-     mode: Optional[InputConfluentCloudMode] = InputConfluentCloudMode.ALWAYS
+     mode: Annotated[
+         Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
+     ] = InputConfluentCloudMode.ALWAYS
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      max_buffer_size: Annotated[
@@ -88,9 +93,10 @@ class InputConfluentCloudPq(BaseModel):
      path: Optional[str] = "$CRIBL_HOME/state/queues"
      r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-     compress: Optional[InputConfluentCloudCompression] = (
-         InputConfluentCloudCompression.NONE
-     )
+     compress: Annotated[
+         Optional[InputConfluentCloudCompression],
+         PlainValidator(validate_open_enum(False)),
+     ] = InputConfluentCloudCompression.NONE
      r"""Codec to use to compress the persisted data"""

      pq_controls: Annotated[
@@ -98,14 +104,14 @@ class InputConfluentCloudPq(BaseModel):
      ] = None


- class InputConfluentCloudMinimumTLSVersion(str, Enum):
+ class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
      TL_SV1_3 = "TLSv1.3"


- class InputConfluentCloudMaximumTLSVersion(str, Enum):
+ class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
@@ -165,17 +171,23 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
      r"""Passphrase to use to decrypt private key"""

      min_version: Annotated[
-         Optional[InputConfluentCloudMinimumTLSVersion],
+         Annotated[
+             Optional[InputConfluentCloudMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
          pydantic.Field(alias="minVersion"),
      ] = None

      max_version: Annotated[
-         Optional[InputConfluentCloudMaximumTLSVersion],
+         Annotated[
+             Optional[InputConfluentCloudMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
          pydantic.Field(alias="maxVersion"),
      ] = None


- class InputConfluentCloudSchemaType(str, Enum):
+ class InputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""The schema format used to encode and decode event data"""

      AVRO = "avro"
@@ -201,14 +213,18 @@ class InputConfluentCloudAuth(BaseModel):
      r"""Select or create a secret that references your credentials"""


- class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+ class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
+     str, Enum, metaclass=utils.OpenEnumMeta
+ ):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
      TL_SV1_3 = "TLSv1.3"


- class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+ class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
+     str, Enum, metaclass=utils.OpenEnumMeta
+ ):
      TL_SV1 = "TLSv1"
      TL_SV1_1 = "TLSv1.1"
      TL_SV1_2 = "TLSv1.2"
@@ -268,12 +284,18 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
      r"""Passphrase to use to decrypt private key"""

      min_version: Annotated[
-         Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+         Annotated[
+             Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
          pydantic.Field(alias="minVersion"),
      ] = None

      max_version: Annotated[
-         Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+         Annotated[
+             Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
+             PlainValidator(validate_open_enum(False)),
+         ],
          pydantic.Field(alias="maxVersion"),
      ] = None

@@ -306,7 +328,11 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
      r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

      schema_type: Annotated[
-         Optional[InputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+         Annotated[
+             Optional[InputConfluentCloudSchemaType],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="schemaType"),
      ] = InputConfluentCloudSchemaType.AVRO
      r"""The schema format used to encode and decode event data"""

@@ -329,7 +355,7 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
      tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None


- class InputConfluentCloudSASLMechanism(str, Enum):
+ class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
      PLAIN = "plain"
      SCRAM_SHA_256 = "scram-sha-256"
      SCRAM_SHA_512 = "scram-sha-512"
@@ -350,9 +376,10 @@ class InputConfluentCloudAuthentication(BaseModel):

      disabled: Optional[bool] = True

-     mechanism: Optional[InputConfluentCloudSASLMechanism] = (
-         InputConfluentCloudSASLMechanism.PLAIN
-     )
+     mechanism: Annotated[
+         Optional[InputConfluentCloudSASLMechanism],
+         PlainValidator(validate_open_enum(False)),
+     ] = InputConfluentCloudSASLMechanism.PLAIN

      oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
          False

cribl_control_plane/models/inputcribl.py

@@ -1,9 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputCriblConnection(BaseModel):
      pipeline: Optional[str] = None


- class InputCriblMode(str, Enum):
+ class InputCriblMode(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      SMART = "smart"
      ALWAYS = "always"


- class InputCriblCompression(str, Enum):
+ class InputCriblCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""

      NONE = "none"
@@ -64,7 +67,9 @@ class InputCriblPqTypedDict(TypedDict):


  class InputCriblPq(BaseModel):
-     mode: Optional[InputCriblMode] = InputCriblMode.ALWAYS
+     mode: Annotated[
+         Optional[InputCriblMode], PlainValidator(validate_open_enum(False))
+     ] = InputCriblMode.ALWAYS
      r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

      max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputCriblPq(BaseModel):
      path: Optional[str] = "$CRIBL_HOME/state/queues"
      r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-     compress: Optional[InputCriblCompression] = InputCriblCompression.NONE
+     compress: Annotated[
+         Optional[InputCriblCompression], PlainValidator(validate_open_enum(False))
+     ] = InputCriblCompression.NONE
      r"""Codec to use to compress the persisted data"""

      pq_controls: Annotated[