cribl-control-plane 0.0.48a1__py3-none-any.whl → 0.0.50__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.

Potentially problematic release: this version of cribl-control-plane has been flagged as possibly problematic.

Files changed (165)
  1. cribl_control_plane/_version.py +6 -4
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/httpclient.py +0 -1
  4. cribl_control_plane/models/__init__.py +12 -12
  5. cribl_control_plane/models/appmode.py +13 -0
  6. cribl_control_plane/models/cacheconnection.py +2 -10
  7. cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
  8. cribl_control_plane/models/cloudprovider.py +1 -2
  9. cribl_control_plane/models/configgroup.py +2 -7
  10. cribl_control_plane/models/configgroupcloud.py +2 -6
  11. cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
  12. cribl_control_plane/models/cribllakedataset.py +2 -8
  13. cribl_control_plane/models/datasetmetadata.py +2 -8
  14. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
  15. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
  16. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
  17. cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
  18. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
  19. cribl_control_plane/models/getsummaryop.py +2 -7
  20. cribl_control_plane/models/hbcriblinfo.py +3 -19
  21. cribl_control_plane/models/healthstatus.py +4 -7
  22. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  23. cribl_control_plane/models/inputappscope.py +14 -34
  24. cribl_control_plane/models/inputazureblob.py +6 -17
  25. cribl_control_plane/models/inputcollection.py +4 -11
  26. cribl_control_plane/models/inputconfluentcloud.py +20 -47
  27. cribl_control_plane/models/inputcribl.py +4 -11
  28. cribl_control_plane/models/inputcriblhttp.py +8 -23
  29. cribl_control_plane/models/inputcribllakehttp.py +10 -22
  30. cribl_control_plane/models/inputcriblmetrics.py +4 -12
  31. cribl_control_plane/models/inputcribltcp.py +8 -23
  32. cribl_control_plane/models/inputcrowdstrike.py +10 -26
  33. cribl_control_plane/models/inputdatadogagent.py +8 -24
  34. cribl_control_plane/models/inputdatagen.py +4 -11
  35. cribl_control_plane/models/inputedgeprometheus.py +24 -58
  36. cribl_control_plane/models/inputelastic.py +14 -40
  37. cribl_control_plane/models/inputeventhub.py +6 -15
  38. cribl_control_plane/models/inputexec.py +6 -14
  39. cribl_control_plane/models/inputfile.py +6 -15
  40. cribl_control_plane/models/inputfirehose.py +8 -23
  41. cribl_control_plane/models/inputgooglepubsub.py +6 -19
  42. cribl_control_plane/models/inputgrafana.py +24 -67
  43. cribl_control_plane/models/inputhttp.py +8 -23
  44. cribl_control_plane/models/inputhttpraw.py +8 -23
  45. cribl_control_plane/models/inputjournalfiles.py +4 -12
  46. cribl_control_plane/models/inputkafka.py +16 -46
  47. cribl_control_plane/models/inputkinesis.py +14 -38
  48. cribl_control_plane/models/inputkubeevents.py +4 -11
  49. cribl_control_plane/models/inputkubelogs.py +8 -16
  50. cribl_control_plane/models/inputkubemetrics.py +8 -16
  51. cribl_control_plane/models/inputloki.py +10 -29
  52. cribl_control_plane/models/inputmetrics.py +8 -23
  53. cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
  54. cribl_control_plane/models/inputmsk.py +18 -53
  55. cribl_control_plane/models/inputnetflow.py +4 -11
  56. cribl_control_plane/models/inputoffice365mgmt.py +14 -33
  57. cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
  58. cribl_control_plane/models/inputoffice365service.py +16 -35
  59. cribl_control_plane/models/inputopentelemetry.py +16 -38
  60. cribl_control_plane/models/inputprometheus.py +18 -50
  61. cribl_control_plane/models/inputprometheusrw.py +10 -30
  62. cribl_control_plane/models/inputrawudp.py +4 -11
  63. cribl_control_plane/models/inputs3.py +8 -21
  64. cribl_control_plane/models/inputs3inventory.py +10 -26
  65. cribl_control_plane/models/inputsecuritylake.py +10 -27
  66. cribl_control_plane/models/inputsnmp.py +6 -16
  67. cribl_control_plane/models/inputsplunk.py +12 -33
  68. cribl_control_plane/models/inputsplunkhec.py +10 -29
  69. cribl_control_plane/models/inputsplunksearch.py +14 -33
  70. cribl_control_plane/models/inputsqs.py +10 -27
  71. cribl_control_plane/models/inputsyslog.py +16 -43
  72. cribl_control_plane/models/inputsystemmetrics.py +24 -48
  73. cribl_control_plane/models/inputsystemstate.py +8 -16
  74. cribl_control_plane/models/inputtcp.py +10 -29
  75. cribl_control_plane/models/inputtcpjson.py +10 -29
  76. cribl_control_plane/models/inputwef.py +14 -37
  77. cribl_control_plane/models/inputwindowsmetrics.py +24 -44
  78. cribl_control_plane/models/inputwineventlogs.py +10 -20
  79. cribl_control_plane/models/inputwiz.py +8 -21
  80. cribl_control_plane/models/inputwizwebhook.py +8 -23
  81. cribl_control_plane/models/inputzscalerhec.py +10 -29
  82. cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
  83. cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
  84. cribl_control_plane/models/masterworkerentry.py +2 -7
  85. cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
  86. cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
  87. cribl_control_plane/models/nodeprovidedinfo.py +0 -3
  88. cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
  89. cribl_control_plane/models/nodeupgradestate.py +1 -2
  90. cribl_control_plane/models/nodeupgradestatus.py +5 -13
  91. cribl_control_plane/models/outputazureblob.py +18 -48
  92. cribl_control_plane/models/outputazuredataexplorer.py +28 -73
  93. cribl_control_plane/models/outputazureeventhub.py +18 -40
  94. cribl_control_plane/models/outputazurelogs.py +12 -35
  95. cribl_control_plane/models/outputclickhouse.py +20 -55
  96. cribl_control_plane/models/outputcloudwatch.py +10 -29
  97. cribl_control_plane/models/outputconfluentcloud.py +32 -77
  98. cribl_control_plane/models/outputcriblhttp.py +16 -44
  99. cribl_control_plane/models/outputcribllake.py +16 -46
  100. cribl_control_plane/models/outputcribltcp.py +18 -45
  101. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
  102. cribl_control_plane/models/outputdatadog.py +20 -48
  103. cribl_control_plane/models/outputdataset.py +18 -46
  104. cribl_control_plane/models/outputdiskspool.py +2 -7
  105. cribl_control_plane/models/outputdls3.py +24 -68
  106. cribl_control_plane/models/outputdynatracehttp.py +20 -53
  107. cribl_control_plane/models/outputdynatraceotlp.py +22 -55
  108. cribl_control_plane/models/outputelastic.py +18 -43
  109. cribl_control_plane/models/outputelasticcloud.py +12 -36
  110. cribl_control_plane/models/outputexabeam.py +10 -29
  111. cribl_control_plane/models/outputfilesystem.py +14 -39
  112. cribl_control_plane/models/outputgooglechronicle.py +16 -50
  113. cribl_control_plane/models/outputgooglecloudlogging.py +14 -41
  114. cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
  115. cribl_control_plane/models/outputgooglepubsub.py +10 -31
  116. cribl_control_plane/models/outputgrafanacloud.py +32 -97
  117. cribl_control_plane/models/outputgraphite.py +14 -31
  118. cribl_control_plane/models/outputhoneycomb.py +12 -35
  119. cribl_control_plane/models/outputhumiohec.py +16 -43
  120. cribl_control_plane/models/outputinfluxdb.py +16 -42
  121. cribl_control_plane/models/outputkafka.py +28 -74
  122. cribl_control_plane/models/outputkinesis.py +16 -40
  123. cribl_control_plane/models/outputloki.py +16 -41
  124. cribl_control_plane/models/outputminio.py +24 -65
  125. cribl_control_plane/models/outputmsk.py +30 -82
  126. cribl_control_plane/models/outputnewrelic.py +18 -43
  127. cribl_control_plane/models/outputnewrelicevents.py +14 -41
  128. cribl_control_plane/models/outputopentelemetry.py +26 -67
  129. cribl_control_plane/models/outputprometheus.py +12 -35
  130. cribl_control_plane/models/outputring.py +8 -19
  131. cribl_control_plane/models/outputs3.py +26 -68
  132. cribl_control_plane/models/outputsecuritylake.py +18 -52
  133. cribl_control_plane/models/outputsentinel.py +18 -45
  134. cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
  135. cribl_control_plane/models/outputservicenow.py +24 -60
  136. cribl_control_plane/models/outputsignalfx.py +14 -37
  137. cribl_control_plane/models/outputsns.py +14 -36
  138. cribl_control_plane/models/outputsplunk.py +24 -60
  139. cribl_control_plane/models/outputsplunkhec.py +12 -35
  140. cribl_control_plane/models/outputsplunklb.py +30 -77
  141. cribl_control_plane/models/outputsqs.py +16 -41
  142. cribl_control_plane/models/outputstatsd.py +14 -30
  143. cribl_control_plane/models/outputstatsdext.py +12 -29
  144. cribl_control_plane/models/outputsumologic.py +12 -35
  145. cribl_control_plane/models/outputsyslog.py +24 -58
  146. cribl_control_plane/models/outputtcpjson.py +20 -52
  147. cribl_control_plane/models/outputwavefront.py +12 -35
  148. cribl_control_plane/models/outputwebhook.py +22 -58
  149. cribl_control_plane/models/outputxsiam.py +14 -35
  150. cribl_control_plane/models/productscore.py +1 -2
  151. cribl_control_plane/models/rbacresource.py +1 -2
  152. cribl_control_plane/models/resourcepolicy.py +2 -4
  153. cribl_control_plane/models/routecloneconf.py +13 -0
  154. cribl_control_plane/models/routeconf.py +4 -3
  155. cribl_control_plane/models/runnablejobcollection.py +13 -30
  156. cribl_control_plane/models/runnablejobexecutor.py +4 -13
  157. cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
  158. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
  159. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
  160. cribl_control_plane/models/workertypes.py +1 -2
  161. cribl_control_plane/sdk.py +2 -2
  162. cribl_control_plane/utils/annotations.py +32 -8
  163. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/METADATA +2 -1
  164. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/RECORD +165 -163
  165. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/WHEEL +0 -0
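Nearly every per-model change below follows one pattern: enum classes drop Speakeasy's utils.OpenEnumMeta metaclass, and enum-typed fields lose their PlainValidator(validate_open_enum(...)) wrappers, leaving ordinary closed Enum fields. The sketch below illustrates the practical difference using hypothetical stand-in classes rather than the generated SDK models, assuming standard pydantic v2 validation behavior.

```python
# Hypothetical stand-in mirroring the post-change shape of the generated models.
# "Compression" and "Pq" are illustrative names, not SDK classes.
from enum import Enum
from typing import Optional

from pydantic import BaseModel, ValidationError


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


class Pq(BaseModel):
    compress: Optional[Compression] = Compression.NONE


Pq(compress="gzip")        # known values still coerce to the enum member

try:
    Pq(compress="zstd")    # values outside the declared members are rejected
except ValidationError:
    print("unknown compression codec rejected")
```

Under the removed open-enum pattern, the validator wrapper presumably allowed values outside the declared members to pass through; with plain closed enums, callers that relied on receiving unrecognized values should expect validation errors instead.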
cribl_control_plane/models/inputwizwebhook.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputWizWebhookConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputWizWebhookMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizWebhookMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputWizWebhookCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizWebhookCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputWizWebhookPqTypedDict(TypedDict):


 class InputWizWebhookPq(BaseModel):
-    mode: Annotated[
-        Optional[InputWizWebhookMode], PlainValidator(validate_open_enum(False))
-    ] = InputWizWebhookMode.ALWAYS
+    mode: Optional[InputWizWebhookMode] = InputWizWebhookMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputWizWebhookPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputWizWebhookCompression], PlainValidator(validate_open_enum(False))
-    ] = InputWizWebhookCompression.NONE
+    compress: Optional[InputWizWebhookCompression] = InputWizWebhookCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputWizWebhookPq(BaseModel):
     ] = None


-class InputWizWebhookMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizWebhookMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputWizWebhookMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizWebhookMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,19 +162,11 @@ class InputWizWebhookTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputWizWebhookMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputWizWebhookMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputWizWebhookMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputWizWebhookMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None

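A brief usage sketch for the reshaped InputWizWebhookPq model follows. It assumes the SDK's BaseModel behaves like a standard pydantic v2 model and that the fields not shown in the hunks above are all optional; neither assumption is confirmed by this diff.

```python
# Hedged sketch; class and member names come from the diff above, but the
# assumption that every other InputWizWebhookPq field is optional is not verified here.
from cribl_control_plane.models.inputwizwebhook import (
    InputWizWebhookCompression,
    InputWizWebhookMode,
    InputWizWebhookPq,
)

pq = InputWizWebhookPq()                      # defaults: mode=ALWAYS, compress=NONE
print(pq.mode, pq.compress)

# Raw wire values still coerce to enum members:
pq = InputWizWebhookPq.model_validate({"mode": "smart", "compress": "gzip"})
print(pq.mode is InputWizWebhookMode.SMART)             # True
print(pq.compress is InputWizWebhookCompression.GZIP)   # True
```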
cribl_control_plane/models/inputzscalerhec.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputZscalerHecConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputZscalerHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputZscalerHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputZscalerHecPqTypedDict(TypedDict):


 class InputZscalerHecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputZscalerHecMode], PlainValidator(validate_open_enum(False))
-    ] = InputZscalerHecMode.ALWAYS
+    mode: Optional[InputZscalerHecMode] = InputZscalerHecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputZscalerHecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputZscalerHecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputZscalerHecCompression.NONE
+    compress: Optional[InputZscalerHecCompression] = InputZscalerHecCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputZscalerHecPq(BaseModel):
     ] = None


-class InputZscalerHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

     MANUAL = "manual"
@@ -140,11 +133,7 @@ class InputZscalerHecAuthToken(BaseModel):
     token: Any

     auth_type: Annotated[
-        Annotated[
-            Optional[InputZscalerHecAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputZscalerHecAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputZscalerHecAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

@@ -163,14 +152,14 @@ class InputZscalerHecAuthToken(BaseModel):
     r"""Fields to add to events referencing this token"""


-class InputZscalerHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputZscalerHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -229,19 +218,11 @@ class InputZscalerHecTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputZscalerHecMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputZscalerHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputZscalerHecMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputZscalerHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None

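The token model above shows the same shift for defaulted, aliased enum fields: auth_type is now a closed enum with wire name authType and a MANUAL default. A hedged sketch, assuming token is the only required field of InputZscalerHecAuthToken (as the visible hunks suggest) and standard pydantic v2 behavior:

```python
from pydantic import ValidationError

from cribl_control_plane.models.inputzscalerhec import (
    InputZscalerHecAuthenticationMethod,
    InputZscalerHecAuthToken,
)

auth = InputZscalerHecAuthToken(token="example-token")
print(auth.auth_type)   # InputZscalerHecAuthenticationMethod.MANUAL (the default)

# Values outside the declared members now fail validation instead of passing through:
try:
    InputZscalerHecAuthToken.model_validate(
        {"token": "example-token", "authType": "kerberos"}
    )
except ValidationError:
    print("authType must be one of the declared members")
```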
cribl_control_plane/models/lakehouseconnectiontype.py

@@ -1,10 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from enum import Enum


-class LakehouseConnectionType(str, Enum, metaclass=utils.OpenEnumMeta):
+class LakehouseConnectionType(str, Enum):
     CACHE = "cache"
     ZERO_POINT = "zeroPoint"
cribl_control_plane/models/listconfiggroupbyproductop.py

@@ -8,9 +8,7 @@ from cribl_control_plane.utils import (
     FieldMetadata,
     PathParamMetadata,
     QueryParamMetadata,
-    validate_open_enum,
 )
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -24,7 +22,7 @@ class ListConfigGroupByProductRequestTypedDict(TypedDict):

 class ListConfigGroupByProductRequest(BaseModel):
     product: Annotated[
-        Annotated[ProductsCore, PlainValidator(validate_open_enum(False))],
+        ProductsCore,
         FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
     ]
     r"""Name of the Cribl product to get the Worker Groups or Edge Fleets for."""
cribl_control_plane/models/masterworkerentry.py

@@ -4,12 +4,9 @@ from __future__ import annotations
 from .heartbeatmetadata import HeartbeatMetadata, HeartbeatMetadataTypedDict
 from .nodeprovidedinfo import NodeProvidedInfo, NodeProvidedInfoTypedDict
 from .nodeupgradestatus import NodeUpgradeStatus, NodeUpgradeStatusTypedDict
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -22,7 +19,7 @@ class LastMetrics(BaseModel):
     pass


-class MasterWorkerEntryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class MasterWorkerEntryType(str, Enum):
     INFO = "info"
     REQ = "req"
     RESP = "resp"
@@ -82,8 +79,6 @@ class MasterWorkerEntry(BaseModel):

     status: Optional[str] = None

-    type: Annotated[
-        Optional[MasterWorkerEntryType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[MasterWorkerEntryType] = None

     workers: Optional[MasterWorkerEntryWorkers] = None
cribl_control_plane/models/nodeactiveupgradestatus.py

@@ -1,11 +1,10 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from enum import Enum


-class NodeActiveUpgradeStatus(int, Enum, metaclass=utils.OpenEnumMeta):
+class NodeActiveUpgradeStatus(int, Enum):
     ZERO = 0
     ONE = 1
     TWO = 2
cribl_control_plane/models/nodefailedupgradestatus.py

@@ -1,10 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from enum import Enum


-class NodeFailedUpgradeStatus(int, Enum, metaclass=utils.OpenEnumMeta):
+class NodeFailedUpgradeStatus(int, Enum):
     ZERO = 0
     ONE = 1
cribl_control_plane/models/nodeprovidedinfo.py

@@ -19,7 +19,6 @@ class NodeProvidedInfoTags(BaseModel):

 class NodeProvidedInfoAwsTypedDict(TypedDict):
     enabled: bool
-    instance_id: str
     region: str
     type: str
     zone: str
@@ -29,8 +28,6 @@ class NodeProvidedInfoAwsTypedDict(TypedDict):
 class NodeProvidedInfoAws(BaseModel):
     enabled: bool

-    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
-
     region: str

     type: str
cribl_control_plane/models/nodeskippedupgradestatus.py

@@ -1,11 +1,10 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from enum import Enum


-class NodeSkippedUpgradeStatus(int, Enum, metaclass=utils.OpenEnumMeta):
+class NodeSkippedUpgradeStatus(int, Enum):
     ZERO = 0
     ONE = 1
     TWO = 2
cribl_control_plane/models/nodeupgradestate.py

@@ -1,11 +1,10 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from enum import Enum


-class NodeUpgradeState(int, Enum, metaclass=utils.OpenEnumMeta):
+class NodeUpgradeState(int, Enum):
     ZERO = 0
     ONE = 1
     TWO = 2
cribl_control_plane/models/nodeupgradestatus.py

@@ -6,10 +6,8 @@ from .nodefailedupgradestatus import NodeFailedUpgradeStatus
 from .nodeskippedupgradestatus import NodeSkippedUpgradeStatus
 from .nodeupgradestate import NodeUpgradeState
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
-from pydantic.functional_validators import PlainValidator
 from typing import Optional
-from typing_extensions import Annotated, NotRequired, TypedDict
+from typing_extensions import NotRequired, TypedDict


 class NodeUpgradeStatusTypedDict(TypedDict):
@@ -21,18 +19,12 @@ class NodeUpgradeStatusTypedDict(TypedDict):


 class NodeUpgradeStatus(BaseModel):
-    state: Annotated[NodeUpgradeState, PlainValidator(validate_open_enum(True))]
+    state: NodeUpgradeState

     timestamp: float

-    active: Annotated[
-        Optional[NodeActiveUpgradeStatus], PlainValidator(validate_open_enum(True))
-    ] = None
+    active: Optional[NodeActiveUpgradeStatus] = None

-    failed: Annotated[
-        Optional[NodeFailedUpgradeStatus], PlainValidator(validate_open_enum(True))
-    ] = None
+    failed: Optional[NodeFailedUpgradeStatus] = None

-    skipped: Annotated[
-        Optional[NodeSkippedUpgradeStatus], PlainValidator(validate_open_enum(True))
-    ] = None
+    skipped: Optional[NodeSkippedUpgradeStatus] = None
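NodeUpgradeStatus now types state, active, failed, and skipped directly with the integer-valued enums instead of wrapping them in open-enum validators. A hedged usage sketch, assuming the SDK's BaseModel is a standard pydantic v2 model:

```python
from cribl_control_plane.models.nodeupgradestate import NodeUpgradeState
from cribl_control_plane.models.nodeupgradestatus import NodeUpgradeStatus

# state and timestamp are the two required fields shown above.
status = NodeUpgradeStatus(state=NodeUpgradeState.ONE, timestamp=1717000000.0)

# Raw integers coerce to the matching member; integers with no member now fail validation.
status = NodeUpgradeStatus.model_validate({"state": 2, "timestamp": 1717000000.0})
print(status.state is NodeUpgradeState.TWO)   # True
```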
cribl_control_plane/models/outputazureblob.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -15,7 +12,7 @@ class OutputAzureBlobType(str, Enum):
     AZURE_BLOB = "azure_blob"


-class OutputAzureBlobDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDataFormat(str, Enum):
     r"""Format of the output data"""

     JSON = "json"
@@ -23,28 +20,28 @@ class OutputAzureBlobDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET = "parquet"


-class OutputAzureBlobBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputAzureBlobDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobAuthenticationMethod(str, Enum):
     MANUAL = "manual"
     SECRET = "secret"
     CLIENT_SECRET = "clientSecret"
     CLIENT_CERT = "clientCert"


-class BlobAccessTier(str, Enum, metaclass=utils.OpenEnumMeta):
+class BlobAccessTier(str, Enum):
     INFERRED = "Inferred"
     HOT = "Hot"
     COOL = "Cool"
@@ -52,14 +49,14 @@ class BlobAccessTier(str, Enum, metaclass=utils.OpenEnumMeta):
     ARCHIVE = "Archive"


-class OutputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobCompression(str, Enum):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputAzureBlobCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobCompressionLevel(str, Enum):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -67,7 +64,7 @@ class OutputAzureBlobCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     BEST_COMPRESSION = "best_compression"


-class OutputAzureBlobParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobParquetVersion(str, Enum):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -75,7 +72,7 @@ class OutputAzureBlobParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputAzureBlobDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDataPageVersion(str, Enum):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -264,11 +261,7 @@ class OutputAzureBlob(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputAzureBlobDataFormat], pydantic.Field(alias="format")
     ] = OutputAzureBlobDataFormat.JSON
     r"""Format of the output data"""

@@ -311,10 +304,7 @@ class OutputAzureBlob(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputAzureBlobBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -325,39 +315,26 @@ class OutputAzureBlob(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputAzureBlobDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     auth_type: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[OutputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
     ] = OutputAzureBlobAuthenticationMethod.MANUAL

     storage_class: Annotated[
-        Annotated[Optional[BlobAccessTier], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="storageClass"),
+        Optional[BlobAccessTier], pydantic.Field(alias="storageClass")
     ] = BlobAccessTier.INFERRED

     description: Optional[str] = None

-    compress: Annotated[
-        Optional[OutputAzureBlobCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputAzureBlobCompression.GZIP
+    compress: Optional[OutputAzureBlobCompression] = OutputAzureBlobCompression.GZIP
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobCompressionLevel],
         pydantic.Field(alias="compressionLevel"),
     ] = OutputAzureBlobCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""
@@ -368,19 +345,12 @@ class OutputAzureBlob(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="parquetVersion"),
+        Optional[OutputAzureBlobParquetVersion], pydantic.Field(alias="parquetVersion")
     ] = OutputAzureBlobParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobDataPageVersion],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputAzureBlobDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""