cribl-control-plane 0.0.47__py3-none-any.whl → 0.0.48a1__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (162)
  1. cribl_control_plane/_version.py +3 -5
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/models/__init__.py +12 -12
  4. cribl_control_plane/models/cacheconnection.py +10 -2
  5. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  6. cribl_control_plane/models/cloudprovider.py +2 -1
  7. cribl_control_plane/models/configgroup.py +7 -2
  8. cribl_control_plane/models/configgroupcloud.py +6 -2
  9. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  10. cribl_control_plane/models/cribllakedataset.py +8 -2
  11. cribl_control_plane/models/datasetmetadata.py +8 -2
  12. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  13. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  14. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  15. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  16. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  17. cribl_control_plane/models/getsummaryop.py +7 -2
  18. cribl_control_plane/models/hbcriblinfo.py +19 -3
  19. cribl_control_plane/models/healthstatus.py +7 -4
  20. cribl_control_plane/models/heartbeatmetadata.py +3 -0
  21. cribl_control_plane/models/inputappscope.py +34 -14
  22. cribl_control_plane/models/inputazureblob.py +17 -6
  23. cribl_control_plane/models/inputcollection.py +11 -4
  24. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  25. cribl_control_plane/models/inputcribl.py +11 -4
  26. cribl_control_plane/models/inputcriblhttp.py +23 -8
  27. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  28. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  29. cribl_control_plane/models/inputcribltcp.py +23 -8
  30. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  31. cribl_control_plane/models/inputdatadogagent.py +24 -8
  32. cribl_control_plane/models/inputdatagen.py +11 -4
  33. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  34. cribl_control_plane/models/inputelastic.py +40 -14
  35. cribl_control_plane/models/inputeventhub.py +15 -6
  36. cribl_control_plane/models/inputexec.py +14 -6
  37. cribl_control_plane/models/inputfile.py +15 -6
  38. cribl_control_plane/models/inputfirehose.py +23 -8
  39. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  40. cribl_control_plane/models/inputgrafana.py +67 -24
  41. cribl_control_plane/models/inputhttp.py +23 -8
  42. cribl_control_plane/models/inputhttpraw.py +23 -8
  43. cribl_control_plane/models/inputjournalfiles.py +12 -4
  44. cribl_control_plane/models/inputkafka.py +46 -16
  45. cribl_control_plane/models/inputkinesis.py +38 -14
  46. cribl_control_plane/models/inputkubeevents.py +11 -4
  47. cribl_control_plane/models/inputkubelogs.py +16 -8
  48. cribl_control_plane/models/inputkubemetrics.py +16 -8
  49. cribl_control_plane/models/inputloki.py +29 -10
  50. cribl_control_plane/models/inputmetrics.py +23 -8
  51. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  52. cribl_control_plane/models/inputmsk.py +53 -18
  53. cribl_control_plane/models/inputnetflow.py +11 -4
  54. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  55. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  56. cribl_control_plane/models/inputoffice365service.py +35 -16
  57. cribl_control_plane/models/inputopentelemetry.py +38 -16
  58. cribl_control_plane/models/inputprometheus.py +50 -18
  59. cribl_control_plane/models/inputprometheusrw.py +30 -10
  60. cribl_control_plane/models/inputrawudp.py +11 -4
  61. cribl_control_plane/models/inputs3.py +21 -8
  62. cribl_control_plane/models/inputs3inventory.py +26 -10
  63. cribl_control_plane/models/inputsecuritylake.py +27 -10
  64. cribl_control_plane/models/inputsnmp.py +16 -6
  65. cribl_control_plane/models/inputsplunk.py +33 -12
  66. cribl_control_plane/models/inputsplunkhec.py +29 -10
  67. cribl_control_plane/models/inputsplunksearch.py +33 -14
  68. cribl_control_plane/models/inputsqs.py +27 -10
  69. cribl_control_plane/models/inputsyslog.py +43 -16
  70. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  71. cribl_control_plane/models/inputsystemstate.py +16 -8
  72. cribl_control_plane/models/inputtcp.py +29 -10
  73. cribl_control_plane/models/inputtcpjson.py +29 -10
  74. cribl_control_plane/models/inputwef.py +37 -14
  75. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  76. cribl_control_plane/models/inputwineventlogs.py +20 -10
  77. cribl_control_plane/models/inputwiz.py +21 -8
  78. cribl_control_plane/models/inputwizwebhook.py +23 -8
  79. cribl_control_plane/models/inputzscalerhec.py +29 -10
  80. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  81. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  82. cribl_control_plane/models/masterworkerentry.py +7 -2
  83. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  84. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  85. cribl_control_plane/models/nodeprovidedinfo.py +3 -0
  86. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  87. cribl_control_plane/models/nodeupgradestate.py +2 -1
  88. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  89. cribl_control_plane/models/outputazureblob.py +48 -18
  90. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  91. cribl_control_plane/models/outputazureeventhub.py +40 -18
  92. cribl_control_plane/models/outputazurelogs.py +35 -12
  93. cribl_control_plane/models/outputclickhouse.py +55 -20
  94. cribl_control_plane/models/outputcloudwatch.py +29 -10
  95. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  96. cribl_control_plane/models/outputcriblhttp.py +44 -16
  97. cribl_control_plane/models/outputcribllake.py +46 -16
  98. cribl_control_plane/models/outputcribltcp.py +45 -18
  99. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  100. cribl_control_plane/models/outputdatadog.py +48 -20
  101. cribl_control_plane/models/outputdataset.py +46 -18
  102. cribl_control_plane/models/outputdiskspool.py +7 -2
  103. cribl_control_plane/models/outputdls3.py +68 -24
  104. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  105. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  106. cribl_control_plane/models/outputelastic.py +43 -18
  107. cribl_control_plane/models/outputelasticcloud.py +36 -12
  108. cribl_control_plane/models/outputexabeam.py +29 -10
  109. cribl_control_plane/models/outputfilesystem.py +39 -14
  110. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  111. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  112. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  113. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  114. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  115. cribl_control_plane/models/outputgraphite.py +31 -14
  116. cribl_control_plane/models/outputhoneycomb.py +35 -12
  117. cribl_control_plane/models/outputhumiohec.py +43 -16
  118. cribl_control_plane/models/outputinfluxdb.py +42 -16
  119. cribl_control_plane/models/outputkafka.py +74 -28
  120. cribl_control_plane/models/outputkinesis.py +40 -16
  121. cribl_control_plane/models/outputloki.py +41 -16
  122. cribl_control_plane/models/outputminio.py +65 -24
  123. cribl_control_plane/models/outputmsk.py +82 -30
  124. cribl_control_plane/models/outputnewrelic.py +43 -18
  125. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  126. cribl_control_plane/models/outputopentelemetry.py +67 -26
  127. cribl_control_plane/models/outputprometheus.py +35 -12
  128. cribl_control_plane/models/outputring.py +19 -8
  129. cribl_control_plane/models/outputs3.py +68 -26
  130. cribl_control_plane/models/outputsecuritylake.py +52 -18
  131. cribl_control_plane/models/outputsentinel.py +45 -18
  132. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  133. cribl_control_plane/models/outputservicenow.py +60 -24
  134. cribl_control_plane/models/outputsignalfx.py +37 -14
  135. cribl_control_plane/models/outputsns.py +36 -14
  136. cribl_control_plane/models/outputsplunk.py +60 -24
  137. cribl_control_plane/models/outputsplunkhec.py +35 -12
  138. cribl_control_plane/models/outputsplunklb.py +77 -30
  139. cribl_control_plane/models/outputsqs.py +41 -16
  140. cribl_control_plane/models/outputstatsd.py +30 -14
  141. cribl_control_plane/models/outputstatsdext.py +29 -12
  142. cribl_control_plane/models/outputsumologic.py +35 -12
  143. cribl_control_plane/models/outputsyslog.py +58 -24
  144. cribl_control_plane/models/outputtcpjson.py +52 -20
  145. cribl_control_plane/models/outputwavefront.py +35 -12
  146. cribl_control_plane/models/outputwebhook.py +58 -22
  147. cribl_control_plane/models/outputxsiam.py +35 -14
  148. cribl_control_plane/models/productscore.py +2 -1
  149. cribl_control_plane/models/rbacresource.py +2 -1
  150. cribl_control_plane/models/resourcepolicy.py +4 -2
  151. cribl_control_plane/models/routeconf.py +3 -4
  152. cribl_control_plane/models/runnablejobcollection.py +30 -13
  153. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  154. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  155. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  156. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  157. cribl_control_plane/models/workertypes.py +2 -1
  158. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/METADATA +1 -1
  159. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/RECORD +160 -162
  160. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/WHEEL +1 -1
  161. cribl_control_plane/models/appmode.py +0 -13
  162. cribl_control_plane/models/routecloneconf.py +0 -13
cribl_control_plane/models/inputopentelemetry.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputOpenTelemetryConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputOpenTelemetryMode(str, Enum):
+class InputOpenTelemetryMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputOpenTelemetryCompression(str, Enum):
+class InputOpenTelemetryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputOpenTelemetryPqTypedDict(TypedDict):


 class InputOpenTelemetryPq(BaseModel):
-    mode: Optional[InputOpenTelemetryMode] = InputOpenTelemetryMode.ALWAYS
+    mode: Annotated[
+        Optional[InputOpenTelemetryMode], PlainValidator(validate_open_enum(False))
+    ] = InputOpenTelemetryMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,9 +93,10 @@ class InputOpenTelemetryPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputOpenTelemetryCompression] = (
-        InputOpenTelemetryCompression.NONE
-    )
+    compress: Annotated[
+        Optional[InputOpenTelemetryCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputOpenTelemetryCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -98,14 +104,14 @@ class InputOpenTelemetryPq(BaseModel):
     ] = None


-class InputOpenTelemetryMinimumTLSVersion(str, Enum):
+class InputOpenTelemetryMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputOpenTelemetryMaximumTLSVersion(str, Enum):
+class InputOpenTelemetryMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -164,31 +170,37 @@ class InputOpenTelemetryTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Optional[InputOpenTelemetryMinimumTLSVersion],
+        Annotated[
+            Optional[InputOpenTelemetryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[InputOpenTelemetryMaximumTLSVersion],
+        Annotated[
+            Optional[InputOpenTelemetryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None


-class InputOpenTelemetryProtocol(str, Enum):
+class InputOpenTelemetryProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Select whether to leverage gRPC or HTTP for OpenTelemetry"""

     GRPC = "grpc"
     HTTP = "http"


-class InputOpenTelemetryOTLPVersion(str, Enum):
+class InputOpenTelemetryOTLPVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""The version of OTLP Protobuf definitions to use when interpreting received data"""

     ZERO_DOT_10_DOT_0 = "0.10.0"
     ONE_DOT_3_DOT_1 = "1.3.1"


-class InputOpenTelemetryAuthenticationType(str, Enum):
+class InputOpenTelemetryAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""OpenTelemetry authentication type"""

     NONE = "none"
@@ -417,7 +429,9 @@ class InputOpenTelemetry(BaseModel):
     ] = "/^$/"
     r"""Messages from matched IP addresses will be ignored. This takes precedence over the allowlist."""

-    protocol: Optional[InputOpenTelemetryProtocol] = InputOpenTelemetryProtocol.GRPC
+    protocol: Annotated[
+        Optional[InputOpenTelemetryProtocol], PlainValidator(validate_open_enum(False))
+    ] = InputOpenTelemetryProtocol.GRPC
     r"""Select whether to leverage gRPC or HTTP for OpenTelemetry"""

     extract_spans: Annotated[Optional[bool], pydantic.Field(alias="extractSpans")] = (
@@ -431,12 +445,20 @@ class InputOpenTelemetry(BaseModel):
     r"""Enable to extract each incoming Gauge or IntGauge metric to multiple events, one per data point"""

     otlp_version: Annotated[
-        Optional[InputOpenTelemetryOTLPVersion], pydantic.Field(alias="otlpVersion")
+        Annotated[
+            Optional[InputOpenTelemetryOTLPVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="otlpVersion"),
     ] = InputOpenTelemetryOTLPVersion.ZERO_DOT_10_DOT_0
     r"""The version of OTLP Protobuf definitions to use when interpreting received data"""

     auth_type: Annotated[
-        Optional[InputOpenTelemetryAuthenticationType], pydantic.Field(alias="authType")
+        Annotated[
+            Optional[InputOpenTelemetryAuthenticationType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
     ] = InputOpenTelemetryAuthenticationType.NONE
     r"""OpenTelemetry authentication type"""

cribl_control_plane/models/inputprometheus.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputPrometheusConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputPrometheusMode(str, Enum):
+class InputPrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputPrometheusCompression(str, Enum):
+class InputPrometheusCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputPrometheusPqTypedDict(TypedDict):


 class InputPrometheusPq(BaseModel):
-    mode: Optional[InputPrometheusMode] = InputPrometheusMode.ALWAYS
+    mode: Annotated[
+        Optional[InputPrometheusMode], PlainValidator(validate_open_enum(False))
+    ] = InputPrometheusMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputPrometheusPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputPrometheusCompression] = InputPrometheusCompression.NONE
+    compress: Annotated[
+        Optional[InputPrometheusCompression], PlainValidator(validate_open_enum(False))
+    ] = InputPrometheusCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -96,7 +103,7 @@ class InputPrometheusPq(BaseModel):
     ] = None


-class InputPrometheusDiscoveryType(str, Enum):
+class InputPrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""

     STATIC = "static"
@@ -104,7 +111,7 @@ class InputPrometheusDiscoveryType(str, Enum):
     EC2 = "ec2"


-class InputPrometheusLogLevel(str, Enum):
+class InputPrometheusLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Collector runtime Log Level"""

     ERROR = "error"
@@ -126,14 +133,16 @@ class InputPrometheusMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputPrometheusAuthTypeAuthenticationMethod(str, Enum):
+class InputPrometheusAuthTypeAuthenticationMethod(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""Enter credentials directly, or select a stored secret"""

     MANUAL = "manual"
     SECRET = "secret"


-class InputPrometheusRecordType(str, Enum):
+class InputPrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""DNS Record type to resolve"""

     SRV = "SRV"
@@ -141,7 +150,7 @@ class InputPrometheusRecordType(str, Enum):
     AAAA = "AAAA"


-class MetricsProtocol(str, Enum):
+class MetricsProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Protocol to use when collecting metrics"""

     HTTP = "http"
@@ -163,7 +172,9 @@ class InputPrometheusSearchFilter(BaseModel):
     r"""Search Filter Values, if empty only \"running\" EC2 instances will be returned"""


-class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(str, Enum):
+class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -171,7 +182,7 @@ class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class InputPrometheusSignatureVersion(str, Enum):
+class InputPrometheusSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing EC2 requests"""

     V2 = "v2"
@@ -302,7 +313,11 @@ class InputPrometheus(BaseModel):
     r"""Other dimensions to include in events"""

     discovery_type: Annotated[
-        Optional[InputPrometheusDiscoveryType], pydantic.Field(alias="discoveryType")
+        Annotated[
+            Optional[InputPrometheusDiscoveryType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="discoveryType"),
     ] = InputPrometheusDiscoveryType.STATIC
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""

@@ -310,7 +325,10 @@ class InputPrometheus(BaseModel):
     r"""How often in minutes to scrape targets for metrics, 60 must be evenly divisible by the value or save will fail."""

     log_level: Annotated[
-        Optional[InputPrometheusLogLevel], pydantic.Field(alias="logLevel")
+        Annotated[
+            Optional[InputPrometheusLogLevel], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="logLevel"),
     ] = InputPrometheusLogLevel.INFO
     r"""Collector runtime Log Level"""

@@ -344,7 +362,10 @@ class InputPrometheus(BaseModel):
     r"""Fields to add to events from this input"""

     auth_type: Annotated[
-        Optional[InputPrometheusAuthTypeAuthenticationMethod],
+        Annotated[
+            Optional[InputPrometheusAuthTypeAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="authType"),
     ] = InputPrometheusAuthTypeAuthenticationMethod.MANUAL
     r"""Enter credentials directly, or select a stored secret"""
@@ -360,12 +381,17 @@ class InputPrometheus(BaseModel):
     r"""List of DNS names to resolve"""

     record_type: Annotated[
-        Optional[InputPrometheusRecordType], pydantic.Field(alias="recordType")
+        Annotated[
+            Optional[InputPrometheusRecordType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="recordType"),
     ] = InputPrometheusRecordType.SRV
     r"""DNS Record type to resolve"""

     scrape_protocol: Annotated[
-        Optional[MetricsProtocol], pydantic.Field(alias="scrapeProtocol")
+        Annotated[Optional[MetricsProtocol], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="scrapeProtocol"),
     ] = MetricsProtocol.HTTP
     r"""Protocol to use when collecting metrics"""

@@ -387,7 +413,10 @@ class InputPrometheus(BaseModel):
     r"""EC2 Instance Search Filter"""

     aws_authentication_method: Annotated[
-        Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
+        Annotated[
+            Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputPrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -403,7 +432,10 @@ class InputPrometheus(BaseModel):
     r"""EC2 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to EC2-compatible endpoint."""

     signature_version: Annotated[
-        Optional[InputPrometheusSignatureVersion],
+        Annotated[
+            Optional[InputPrometheusSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="signatureVersion"),
     ] = InputPrometheusSignatureVersion.V4
     r"""Signature version to use for signing EC2 requests"""
cribl_control_plane/models/inputprometheusrw.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputPrometheusRwConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputPrometheusRwMode(str, Enum):
+class InputPrometheusRwMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputPrometheusRwCompression(str, Enum):
+class InputPrometheusRwCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputPrometheusRwPqTypedDict(TypedDict):


 class InputPrometheusRwPq(BaseModel):
-    mode: Optional[InputPrometheusRwMode] = InputPrometheusRwMode.ALWAYS
+    mode: Annotated[
+        Optional[InputPrometheusRwMode], PlainValidator(validate_open_enum(False))
+    ] = InputPrometheusRwMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,10 @@ class InputPrometheusRwPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputPrometheusRwCompression] = InputPrometheusRwCompression.NONE
+    compress: Annotated[
+        Optional[InputPrometheusRwCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputPrometheusRwCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -96,14 +104,14 @@ class InputPrometheusRwPq(BaseModel):
     ] = None


-class InputPrometheusRwMinimumTLSVersion(str, Enum):
+class InputPrometheusRwMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputPrometheusRwMaximumTLSVersion(str, Enum):
+class InputPrometheusRwMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -162,15 +170,23 @@ class InputPrometheusRwTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Optional[InputPrometheusRwMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[InputPrometheusRwMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Optional[InputPrometheusRwMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[InputPrometheusRwMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None


-class InputPrometheusRwAuthenticationType(str, Enum):
+class InputPrometheusRwAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Remote Write authentication type"""

     NONE = "none"
@@ -401,7 +417,11 @@ class InputPrometheusRw(BaseModel):
     r"""Absolute path on which to listen for Prometheus requests. Defaults to /write, which will expand as: http://<your‑upstream‑URL>:<your‑port>/write."""

     auth_type: Annotated[
-        Optional[InputPrometheusRwAuthenticationType], pydantic.Field(alias="authType")
+        Annotated[
+            Optional[InputPrometheusRwAuthenticationType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
     ] = InputPrometheusRwAuthenticationType.NONE
     r"""Remote Write authentication type"""

cribl_control_plane/models/inputrawudp.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputRawUDPConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputRawUDPMode(str, Enum):
+class InputRawUDPMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputRawUDPCompression(str, Enum):
+class InputRawUDPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputRawUDPPqTypedDict(TypedDict):


 class InputRawUDPPq(BaseModel):
-    mode: Optional[InputRawUDPMode] = InputRawUDPMode.ALWAYS
+    mode: Annotated[
+        Optional[InputRawUDPMode], PlainValidator(validate_open_enum(False))
+    ] = InputRawUDPMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputRawUDPPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputRawUDPCompression] = InputRawUDPCompression.NONE
+    compress: Annotated[
+        Optional[InputRawUDPCompression], PlainValidator(validate_open_enum(False))
+    ] = InputRawUDPCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
cribl_control_plane/models/inputs3.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,14 +26,14 @@ class InputS3Connection(BaseModel):
     pipeline: Optional[str] = None


-class InputS3Mode(str, Enum):
+class InputS3Mode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputS3Compression(str, Enum):
+class InputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -64,7 +67,9 @@ class InputS3PqTypedDict(TypedDict):


 class InputS3Pq(BaseModel):
-    mode: Optional[InputS3Mode] = InputS3Mode.ALWAYS
+    mode: Annotated[
+        Optional[InputS3Mode], PlainValidator(validate_open_enum(False))
+    ] = InputS3Mode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputS3Pq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Optional[InputS3Compression] = InputS3Compression.NONE
+    compress: Annotated[
+        Optional[InputS3Compression], PlainValidator(validate_open_enum(False))
+    ] = InputS3Compression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -96,7 +103,7 @@ class InputS3Pq(BaseModel):
     ] = None


-class InputS3AuthenticationMethod(str, Enum):
+class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -104,7 +111,7 @@ class InputS3AuthenticationMethod(str, Enum):
     SECRET = "secret"


-class InputS3SignatureVersion(str, Enum):
+class InputS3SignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
@@ -286,7 +293,10 @@ class InputS3(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""

     aws_authentication_method: Annotated[
-        Optional[InputS3AuthenticationMethod],
+        Annotated[
+            Optional[InputS3AuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3AuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -302,7 +312,10 @@ class InputS3(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Optional[InputS3SignatureVersion], pydantic.Field(alias="signatureVersion")
+        Annotated[
+            Optional[InputS3SignatureVersion], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="signatureVersion"),
     ] = InputS3SignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""

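For contrast, a conventional closed enum field under pydantic rejects anything outside the declared members. The classes below are illustrative stand-ins rather than the generated SDK models, but they show the hard validation failure that an open-enum approach like the one introduced in this release would presumably avoid when the API starts returning values the SDK does not yet declare.

```python
# Closed-enum behavior for comparison; SignatureVersion/ClosedS3Input are
# hypothetical examples, not classes from cribl_control_plane.
from enum import Enum
from typing import Optional

from pydantic import BaseModel, ValidationError


class SignatureVersion(str, Enum):
    V2 = "v2"
    V4 = "v4"


class ClosedS3Input(BaseModel):
    signature_version: Optional[SignatureVersion] = SignatureVersion.V4


try:
    ClosedS3Input(signature_version="v4a")  # a value this enum does not declare
except ValidationError as err:
    print(err.errors()[0]["type"])  # 'enum' -- the closed field hard-fails
```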