cribl-control-plane 0.0.48a1__py3-none-any.whl → 0.0.49__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (164)
  1. cribl_control_plane/_version.py +6 -4
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +12 -12
  4. cribl_control_plane/models/appmode.py +13 -0
  5. cribl_control_plane/models/cacheconnection.py +2 -10
  6. cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
  7. cribl_control_plane/models/cloudprovider.py +1 -2
  8. cribl_control_plane/models/configgroup.py +2 -7
  9. cribl_control_plane/models/configgroupcloud.py +2 -6
  10. cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
  11. cribl_control_plane/models/cribllakedataset.py +2 -8
  12. cribl_control_plane/models/datasetmetadata.py +2 -8
  13. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
  14. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
  15. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
  16. cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
  17. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
  18. cribl_control_plane/models/getsummaryop.py +2 -7
  19. cribl_control_plane/models/hbcriblinfo.py +3 -19
  20. cribl_control_plane/models/healthstatus.py +4 -7
  21. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  22. cribl_control_plane/models/inputappscope.py +14 -34
  23. cribl_control_plane/models/inputazureblob.py +6 -17
  24. cribl_control_plane/models/inputcollection.py +4 -11
  25. cribl_control_plane/models/inputconfluentcloud.py +20 -47
  26. cribl_control_plane/models/inputcribl.py +4 -11
  27. cribl_control_plane/models/inputcriblhttp.py +8 -23
  28. cribl_control_plane/models/inputcribllakehttp.py +10 -22
  29. cribl_control_plane/models/inputcriblmetrics.py +4 -12
  30. cribl_control_plane/models/inputcribltcp.py +8 -23
  31. cribl_control_plane/models/inputcrowdstrike.py +10 -26
  32. cribl_control_plane/models/inputdatadogagent.py +8 -24
  33. cribl_control_plane/models/inputdatagen.py +4 -11
  34. cribl_control_plane/models/inputedgeprometheus.py +24 -58
  35. cribl_control_plane/models/inputelastic.py +14 -40
  36. cribl_control_plane/models/inputeventhub.py +6 -15
  37. cribl_control_plane/models/inputexec.py +6 -14
  38. cribl_control_plane/models/inputfile.py +6 -15
  39. cribl_control_plane/models/inputfirehose.py +8 -23
  40. cribl_control_plane/models/inputgooglepubsub.py +6 -19
  41. cribl_control_plane/models/inputgrafana.py +24 -67
  42. cribl_control_plane/models/inputhttp.py +8 -23
  43. cribl_control_plane/models/inputhttpraw.py +8 -23
  44. cribl_control_plane/models/inputjournalfiles.py +4 -12
  45. cribl_control_plane/models/inputkafka.py +16 -46
  46. cribl_control_plane/models/inputkinesis.py +14 -38
  47. cribl_control_plane/models/inputkubeevents.py +4 -11
  48. cribl_control_plane/models/inputkubelogs.py +8 -16
  49. cribl_control_plane/models/inputkubemetrics.py +8 -16
  50. cribl_control_plane/models/inputloki.py +10 -29
  51. cribl_control_plane/models/inputmetrics.py +8 -23
  52. cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
  53. cribl_control_plane/models/inputmsk.py +18 -53
  54. cribl_control_plane/models/inputnetflow.py +4 -11
  55. cribl_control_plane/models/inputoffice365mgmt.py +14 -33
  56. cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
  57. cribl_control_plane/models/inputoffice365service.py +16 -35
  58. cribl_control_plane/models/inputopentelemetry.py +16 -38
  59. cribl_control_plane/models/inputprometheus.py +18 -50
  60. cribl_control_plane/models/inputprometheusrw.py +10 -30
  61. cribl_control_plane/models/inputrawudp.py +4 -11
  62. cribl_control_plane/models/inputs3.py +8 -21
  63. cribl_control_plane/models/inputs3inventory.py +10 -26
  64. cribl_control_plane/models/inputsecuritylake.py +10 -27
  65. cribl_control_plane/models/inputsnmp.py +6 -16
  66. cribl_control_plane/models/inputsplunk.py +12 -33
  67. cribl_control_plane/models/inputsplunkhec.py +10 -29
  68. cribl_control_plane/models/inputsplunksearch.py +14 -33
  69. cribl_control_plane/models/inputsqs.py +10 -27
  70. cribl_control_plane/models/inputsyslog.py +16 -43
  71. cribl_control_plane/models/inputsystemmetrics.py +24 -48
  72. cribl_control_plane/models/inputsystemstate.py +8 -16
  73. cribl_control_plane/models/inputtcp.py +10 -29
  74. cribl_control_plane/models/inputtcpjson.py +10 -29
  75. cribl_control_plane/models/inputwef.py +14 -37
  76. cribl_control_plane/models/inputwindowsmetrics.py +24 -44
  77. cribl_control_plane/models/inputwineventlogs.py +10 -20
  78. cribl_control_plane/models/inputwiz.py +8 -21
  79. cribl_control_plane/models/inputwizwebhook.py +8 -23
  80. cribl_control_plane/models/inputzscalerhec.py +10 -29
  81. cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
  82. cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
  83. cribl_control_plane/models/masterworkerentry.py +2 -7
  84. cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
  85. cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
  86. cribl_control_plane/models/nodeprovidedinfo.py +0 -3
  87. cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
  88. cribl_control_plane/models/nodeupgradestate.py +1 -2
  89. cribl_control_plane/models/nodeupgradestatus.py +5 -13
  90. cribl_control_plane/models/outputazureblob.py +18 -48
  91. cribl_control_plane/models/outputazuredataexplorer.py +28 -73
  92. cribl_control_plane/models/outputazureeventhub.py +18 -40
  93. cribl_control_plane/models/outputazurelogs.py +12 -35
  94. cribl_control_plane/models/outputclickhouse.py +20 -55
  95. cribl_control_plane/models/outputcloudwatch.py +10 -29
  96. cribl_control_plane/models/outputconfluentcloud.py +32 -77
  97. cribl_control_plane/models/outputcriblhttp.py +16 -44
  98. cribl_control_plane/models/outputcribllake.py +16 -46
  99. cribl_control_plane/models/outputcribltcp.py +18 -45
  100. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
  101. cribl_control_plane/models/outputdatadog.py +20 -48
  102. cribl_control_plane/models/outputdataset.py +18 -46
  103. cribl_control_plane/models/outputdiskspool.py +2 -7
  104. cribl_control_plane/models/outputdls3.py +24 -68
  105. cribl_control_plane/models/outputdynatracehttp.py +20 -53
  106. cribl_control_plane/models/outputdynatraceotlp.py +22 -55
  107. cribl_control_plane/models/outputelastic.py +18 -43
  108. cribl_control_plane/models/outputelasticcloud.py +12 -36
  109. cribl_control_plane/models/outputexabeam.py +10 -29
  110. cribl_control_plane/models/outputfilesystem.py +14 -39
  111. cribl_control_plane/models/outputgooglechronicle.py +16 -50
  112. cribl_control_plane/models/outputgooglecloudlogging.py +14 -41
  113. cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
  114. cribl_control_plane/models/outputgooglepubsub.py +10 -31
  115. cribl_control_plane/models/outputgrafanacloud.py +32 -97
  116. cribl_control_plane/models/outputgraphite.py +14 -31
  117. cribl_control_plane/models/outputhoneycomb.py +12 -35
  118. cribl_control_plane/models/outputhumiohec.py +16 -43
  119. cribl_control_plane/models/outputinfluxdb.py +16 -42
  120. cribl_control_plane/models/outputkafka.py +28 -74
  121. cribl_control_plane/models/outputkinesis.py +16 -40
  122. cribl_control_plane/models/outputloki.py +16 -41
  123. cribl_control_plane/models/outputminio.py +24 -65
  124. cribl_control_plane/models/outputmsk.py +30 -82
  125. cribl_control_plane/models/outputnewrelic.py +18 -43
  126. cribl_control_plane/models/outputnewrelicevents.py +14 -41
  127. cribl_control_plane/models/outputopentelemetry.py +26 -67
  128. cribl_control_plane/models/outputprometheus.py +12 -35
  129. cribl_control_plane/models/outputring.py +8 -19
  130. cribl_control_plane/models/outputs3.py +26 -68
  131. cribl_control_plane/models/outputsecuritylake.py +18 -52
  132. cribl_control_plane/models/outputsentinel.py +18 -45
  133. cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
  134. cribl_control_plane/models/outputservicenow.py +24 -60
  135. cribl_control_plane/models/outputsignalfx.py +14 -37
  136. cribl_control_plane/models/outputsns.py +14 -36
  137. cribl_control_plane/models/outputsplunk.py +24 -60
  138. cribl_control_plane/models/outputsplunkhec.py +12 -35
  139. cribl_control_plane/models/outputsplunklb.py +30 -77
  140. cribl_control_plane/models/outputsqs.py +16 -41
  141. cribl_control_plane/models/outputstatsd.py +14 -30
  142. cribl_control_plane/models/outputstatsdext.py +12 -29
  143. cribl_control_plane/models/outputsumologic.py +12 -35
  144. cribl_control_plane/models/outputsyslog.py +24 -58
  145. cribl_control_plane/models/outputtcpjson.py +20 -52
  146. cribl_control_plane/models/outputwavefront.py +12 -35
  147. cribl_control_plane/models/outputwebhook.py +22 -58
  148. cribl_control_plane/models/outputxsiam.py +14 -35
  149. cribl_control_plane/models/productscore.py +1 -2
  150. cribl_control_plane/models/rbacresource.py +1 -2
  151. cribl_control_plane/models/resourcepolicy.py +2 -4
  152. cribl_control_plane/models/routecloneconf.py +13 -0
  153. cribl_control_plane/models/routeconf.py +4 -3
  154. cribl_control_plane/models/runnablejobcollection.py +13 -30
  155. cribl_control_plane/models/runnablejobexecutor.py +4 -13
  156. cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
  157. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
  158. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
  159. cribl_control_plane/models/workertypes.py +1 -2
  160. cribl_control_plane/sdk.py +2 -2
  161. cribl_control_plane/utils/annotations.py +32 -8
  162. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.49.dist-info}/METADATA +2 -1
  163. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.49.dist-info}/RECORD +164 -162
  164. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.49.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputopentelemetry.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputOpenTelemetryConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputOpenTelemetryMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputOpenTelemetryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputOpenTelemetryPqTypedDict(TypedDict):


 class InputOpenTelemetryPq(BaseModel):
-    mode: Annotated[
-        Optional[InputOpenTelemetryMode], PlainValidator(validate_open_enum(False))
-    ] = InputOpenTelemetryMode.ALWAYS
+    mode: Optional[InputOpenTelemetryMode] = InputOpenTelemetryMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,10 +88,9 @@ class InputOpenTelemetryPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputOpenTelemetryCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputOpenTelemetryCompression.NONE
+    compress: Optional[InputOpenTelemetryCompression] = (
+        InputOpenTelemetryCompression.NONE
+    )
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -104,14 +98,14 @@ class InputOpenTelemetryPq(BaseModel):
     ] = None


-class InputOpenTelemetryMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputOpenTelemetryMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -170,37 +164,31 @@ class InputOpenTelemetryTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputOpenTelemetryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputOpenTelemetryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputOpenTelemetryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputOpenTelemetryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None


-class InputOpenTelemetryProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryProtocol(str, Enum):
     r"""Select whether to leverage gRPC or HTTP for OpenTelemetry"""

     GRPC = "grpc"
     HTTP = "http"


-class InputOpenTelemetryOTLPVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryOTLPVersion(str, Enum):
     r"""The version of OTLP Protobuf definitions to use when interpreting received data"""

     ZERO_DOT_10_DOT_0 = "0.10.0"
     ONE_DOT_3_DOT_1 = "1.3.1"


-class InputOpenTelemetryAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputOpenTelemetryAuthenticationType(str, Enum):
     r"""OpenTelemetry authentication type"""

     NONE = "none"
@@ -429,9 +417,7 @@ class InputOpenTelemetry(BaseModel):
     ] = "/^$/"
     r"""Messages from matched IP addresses will be ignored. This takes precedence over the allowlist."""

-    protocol: Annotated[
-        Optional[InputOpenTelemetryProtocol], PlainValidator(validate_open_enum(False))
-    ] = InputOpenTelemetryProtocol.GRPC
+    protocol: Optional[InputOpenTelemetryProtocol] = InputOpenTelemetryProtocol.GRPC
     r"""Select whether to leverage gRPC or HTTP for OpenTelemetry"""

     extract_spans: Annotated[Optional[bool], pydantic.Field(alias="extractSpans")] = (
@@ -445,20 +431,12 @@ class InputOpenTelemetry(BaseModel):
     r"""Enable to extract each incoming Gauge or IntGauge metric to multiple events, one per data point"""

     otlp_version: Annotated[
-        Annotated[
-            Optional[InputOpenTelemetryOTLPVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="otlpVersion"),
+        Optional[InputOpenTelemetryOTLPVersion], pydantic.Field(alias="otlpVersion")
     ] = InputOpenTelemetryOTLPVersion.ZERO_DOT_10_DOT_0
     r"""The version of OTLP Protobuf definitions to use when interpreting received data"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputOpenTelemetryAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputOpenTelemetryAuthenticationType], pydantic.Field(alias="authType")
     ] = InputOpenTelemetryAuthenticationType.NONE
     r"""OpenTelemetry authentication type"""

cribl_control_plane/models/inputprometheus.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputPrometheusConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputPrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputPrometheusCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputPrometheusPqTypedDict(TypedDict):


 class InputPrometheusPq(BaseModel):
-    mode: Annotated[
-        Optional[InputPrometheusMode], PlainValidator(validate_open_enum(False))
-    ] = InputPrometheusMode.ALWAYS
+    mode: Optional[InputPrometheusMode] = InputPrometheusMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputPrometheusPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputPrometheusCompression], PlainValidator(validate_open_enum(False))
-    ] = InputPrometheusCompression.NONE
+    compress: Optional[InputPrometheusCompression] = InputPrometheusCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputPrometheusPq(BaseModel):
     ] = None


-class InputPrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusDiscoveryType(str, Enum):
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""

     STATIC = "static"
@@ -111,7 +104,7 @@ class InputPrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
     EC2 = "ec2"


-class InputPrometheusLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusLogLevel(str, Enum):
     r"""Collector runtime Log Level"""

     ERROR = "error"
@@ -133,16 +126,14 @@ class InputPrometheusMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputPrometheusAuthTypeAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputPrometheusAuthTypeAuthenticationMethod(str, Enum):
     r"""Enter credentials directly, or select a stored secret"""

     MANUAL = "manual"
     SECRET = "secret"


-class InputPrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRecordType(str, Enum):
     r"""DNS Record type to resolve"""

     SRV = "SRV"
@@ -150,7 +141,7 @@ class InputPrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
     AAAA = "AAAA"


-class MetricsProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class MetricsProtocol(str, Enum):
     r"""Protocol to use when collecting metrics"""

     HTTP = "http"
@@ -172,9 +163,7 @@ class InputPrometheusSearchFilter(BaseModel):
     r"""Search Filter Values, if empty only \"running\" EC2 instances will be returned"""


-class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -182,7 +171,7 @@ class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
     SECRET = "secret"


-class InputPrometheusSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusSignatureVersion(str, Enum):
     r"""Signature version to use for signing EC2 requests"""

     V2 = "v2"
@@ -313,11 +302,7 @@ class InputPrometheus(BaseModel):
     r"""Other dimensions to include in events"""

     discovery_type: Annotated[
-        Annotated[
-            Optional[InputPrometheusDiscoveryType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="discoveryType"),
+        Optional[InputPrometheusDiscoveryType], pydantic.Field(alias="discoveryType")
     ] = InputPrometheusDiscoveryType.STATIC
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""

@@ -325,10 +310,7 @@ class InputPrometheus(BaseModel):
     r"""How often in minutes to scrape targets for metrics, 60 must be evenly divisible by the value or save will fail."""

     log_level: Annotated[
-        Annotated[
-            Optional[InputPrometheusLogLevel], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="logLevel"),
+        Optional[InputPrometheusLogLevel], pydantic.Field(alias="logLevel")
     ] = InputPrometheusLogLevel.INFO
     r"""Collector runtime Log Level"""

@@ -362,10 +344,7 @@ class InputPrometheus(BaseModel):
     r"""Fields to add to events from this input"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputPrometheusAuthTypeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputPrometheusAuthTypeAuthenticationMethod],
         pydantic.Field(alias="authType"),
     ] = InputPrometheusAuthTypeAuthenticationMethod.MANUAL
     r"""Enter credentials directly, or select a stored secret"""
@@ -381,17 +360,12 @@ class InputPrometheus(BaseModel):
     r"""List of DNS names to resolve"""

     record_type: Annotated[
-        Annotated[
-            Optional[InputPrometheusRecordType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="recordType"),
+        Optional[InputPrometheusRecordType], pydantic.Field(alias="recordType")
     ] = InputPrometheusRecordType.SRV
     r"""DNS Record type to resolve"""

     scrape_protocol: Annotated[
-        Annotated[Optional[MetricsProtocol], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="scrapeProtocol"),
+        Optional[MetricsProtocol], pydantic.Field(alias="scrapeProtocol")
     ] = MetricsProtocol.HTTP
     r"""Protocol to use when collecting metrics"""

@@ -413,10 +387,7 @@ class InputPrometheus(BaseModel):
     r"""EC2 Instance Search Filter"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputPrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -432,10 +403,7 @@ class InputPrometheus(BaseModel):
     r"""EC2 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to EC2-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputPrometheusSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputPrometheusSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputPrometheusSignatureVersion.V4
     r"""Signature version to use for signing EC2 requests"""
cribl_control_plane/models/inputprometheusrw.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputPrometheusRwConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputPrometheusRwMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRwMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputPrometheusRwCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRwCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputPrometheusRwPqTypedDict(TypedDict):


 class InputPrometheusRwPq(BaseModel):
-    mode: Annotated[
-        Optional[InputPrometheusRwMode], PlainValidator(validate_open_enum(False))
-    ] = InputPrometheusRwMode.ALWAYS
+    mode: Optional[InputPrometheusRwMode] = InputPrometheusRwMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,10 +88,7 @@ class InputPrometheusRwPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputPrometheusRwCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputPrometheusRwCompression.NONE
+    compress: Optional[InputPrometheusRwCompression] = InputPrometheusRwCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -104,14 +96,14 @@ class InputPrometheusRwPq(BaseModel):
     ] = None


-class InputPrometheusRwMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRwMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputPrometheusRwMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRwMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -170,23 +162,15 @@ class InputPrometheusRwTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputPrometheusRwMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputPrometheusRwMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputPrometheusRwMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputPrometheusRwMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


-class InputPrometheusRwAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputPrometheusRwAuthenticationType(str, Enum):
     r"""Remote Write authentication type"""

     NONE = "none"
@@ -417,11 +401,7 @@ class InputPrometheusRw(BaseModel):
     r"""Absolute path on which to listen for Prometheus requests. Defaults to /write, which will expand as: http://<your‑upstream‑URL>:<your‑port>/write."""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputPrometheusRwAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputPrometheusRwAuthenticationType], pydantic.Field(alias="authType")
     ] = InputPrometheusRwAuthenticationType.NONE
     r"""Remote Write authentication type"""

cribl_control_plane/models/inputrawudp.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputRawUDPConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputRawUDPMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputRawUDPMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputRawUDPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputRawUDPCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputRawUDPPqTypedDict(TypedDict):


 class InputRawUDPPq(BaseModel):
-    mode: Annotated[
-        Optional[InputRawUDPMode], PlainValidator(validate_open_enum(False))
-    ] = InputRawUDPMode.ALWAYS
+    mode: Optional[InputRawUDPMode] = InputRawUDPMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputRawUDPPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputRawUDPCompression], PlainValidator(validate_open_enum(False))
-    ] = InputRawUDPCompression.NONE
+    compress: Optional[InputRawUDPCompression] = InputRawUDPCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
cribl_control_plane/models/inputs3.py
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputS3Connection(BaseModel):
     pipeline: Optional[str] = None


-class InputS3Mode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Mode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Compression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputS3PqTypedDict(TypedDict):


 class InputS3Pq(BaseModel):
-    mode: Annotated[
-        Optional[InputS3Mode], PlainValidator(validate_open_enum(False))
-    ] = InputS3Mode.ALWAYS
+    mode: Optional[InputS3Mode] = InputS3Mode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputS3Pq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputS3Compression], PlainValidator(validate_open_enum(False))
-    ] = InputS3Compression.NONE
+    compress: Optional[InputS3Compression] = InputS3Compression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputS3Pq(BaseModel):
     ] = None


-class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3AuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -111,7 +104,7 @@ class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"


-class InputS3SignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3SignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
@@ -293,10 +286,7 @@ class InputS3(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputS3AuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3AuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3AuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -312,10 +302,7 @@ class InputS3(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputS3SignatureVersion], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputS3SignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputS3SignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""

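Note on the recurring change in the hunks above: across these input models, 0.0.49 drops the metaclass=utils.OpenEnumMeta declarations and the PlainValidator(validate_open_enum(False)) wrappers, leaving plain str/Enum fields. The sketch below is not SDK code; it illustrates the likely behavioral difference under the assumption that the removed open-enum machinery let unrecognized string values pass through, whereas a plain Enum field in Pydantic rejects them. The Compression enum and both model classes are hypothetical stand-ins.

# Hypothetical sketch (not from the SDK): contrast a closed Enum field, as
# generated in 0.0.49, with a Union-based approximation of open-enum tolerance.
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, ValidationError


class Compression(str, Enum):  # stand-in for e.g. InputS3Compression
    NONE = "none"
    GZIP = "gzip"


class ClosedPq(BaseModel):
    # 0.0.49 style: values outside the Enum fail validation.
    compress: Optional[Compression] = Compression.NONE


class OpenishPq(BaseModel):
    # Rough approximation of the assumed 0.0.48a1 behavior: unknown strings pass through.
    compress: Optional[Union[Compression, str]] = Compression.NONE


print(ClosedPq(compress="gzip").compress)   # Compression.GZIP
print(OpenishPq(compress="zstd").compress)  # "zstd" is kept as a plain string
try:
    ClosedPq(compress="zstd")
except ValidationError as err:
    print("rejected:", err.errors()[0]["type"])  # e.g. "enum"

If that assumption holds, configurations carrying enum values introduced by a newer Cribl release would now raise a validation error when loaded through these models instead of passing through as raw strings.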