cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (144)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
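
The recurring pattern in the model diffs below: 0.0.15 declared its generated enums as open (metaclass=utils.OpenEnumMeta plus a PlainValidator(validate_open_enum(False)) on each field), while 0.0.17 switches them to plain closed Enum classes and makes several `type` discriminator fields optional instead of required. A minimal sketch of the caller-facing effect, assuming these models behave like standard pydantic v2 models (which the imports suggest) and that `port` is the only required field on this source; the port value is purely illustrative:

    # Hedged sketch; class and module names are taken from the diff below.
    from cribl_control_plane.models.inputdatadogagent import (
        InputDatadogAgent,
        InputDatadogAgentType,
    )

    # 0.0.15: type was required ->
    #   InputDatadogAgent(type=InputDatadogAgentType.DATADOG_AGENT, port=8126)
    # 0.0.17: type is Optional and defaults to None.
    source = InputDatadogAgent(port=8126)
    print(source.type)  # None unless set explicitly
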
--- a/cribl_control_plane/models/inputdatadogagent.py
+++ b/cribl_control_plane/models/inputdatadogagent.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputDatadogAgentType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatadogAgentType(str, Enum):
     DATADOG_AGENT = "datadog_agent"


@@ -26,14 +23,14 @@ class InputDatadogAgentConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputDatadogAgentMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatadogAgentMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputDatadogAgentCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatadogAgentCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputDatadogAgentPqTypedDict(TypedDict):


 class InputDatadogAgentPq(BaseModel):
-    mode: Annotated[
-        Optional[InputDatadogAgentMode], PlainValidator(validate_open_enum(False))
-    ] = InputDatadogAgentMode.ALWAYS
+    mode: Optional[InputDatadogAgentMode] = InputDatadogAgentMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,21 +79,18 @@ class InputDatadogAgentPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputDatadogAgentCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputDatadogAgentCompression.NONE
+    compress: Optional[InputDatadogAgentCompression] = InputDatadogAgentCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputDatadogAgentMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatadogAgentMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputDatadogAgentMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatadogAgentMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -157,19 +149,11 @@ class InputDatadogAgentTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputDatadogAgentMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputDatadogAgentMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputDatadogAgentMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputDatadogAgentMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


@@ -204,11 +188,11 @@ class InputDatadogAgentProxyMode(BaseModel):


 class InputDatadogAgentTypedDict(TypedDict):
-    type: InputDatadogAgentType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputDatadogAgentType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -257,14 +241,14 @@ class InputDatadogAgentTypedDict(TypedDict):


 class InputDatadogAgent(BaseModel):
-    type: Annotated[InputDatadogAgentType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""

     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputDatadogAgentType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
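
Dropping the PlainValidator(validate_open_enum(False)) wrappers changes validation behavior, not just the type hints: with a closed Enum, values outside the declared members should now be rejected by pydantic instead of being passed through. A hedged illustration against the persistent-queue model above, assuming all of its fields carry defaults as the visible hunks suggest; the 0.0.15 pass-through behavior is inferred from the validate_open_enum helper's name rather than verified here:

    import pydantic

    from cribl_control_plane.models.inputdatadogagent import InputDatadogAgentPq

    InputDatadogAgentPq(compress="none")  # "none" is a declared member, still accepted

    try:
        # Not a member of InputDatadogAgentCompression in the hunks above.
        InputDatadogAgentPq(compress="zstd")
    except pydantic.ValidationError as err:
        print(err)  # 0.0.17 raises here; 0.0.15's open enums tolerated unknown values
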
--- a/cribl_control_plane/models/inputdatagen.py
+++ b/cribl_control_plane/models/inputdatagen.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputDatagenType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatagenType(str, Enum):
     DATAGEN = "datagen"


@@ -26,14 +23,14 @@ class InputDatagenConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputDatagenMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatagenMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputDatagenCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputDatagenCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputDatagenPqTypedDict(TypedDict):


 class InputDatagenPq(BaseModel):
-    mode: Annotated[
-        Optional[InputDatagenMode], PlainValidator(validate_open_enum(False))
-    ] = InputDatagenMode.ALWAYS
+    mode: Optional[InputDatagenMode] = InputDatagenMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputDatagenPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputDatagenCompression], PlainValidator(validate_open_enum(False))
-    ] = InputDatagenCompression.NONE
+    compress: Optional[InputDatagenCompression] = InputDatagenCompression.NONE
     r"""Codec to use to compress the persisted data"""


@@ -143,7 +136,7 @@ class InputDatagenTypedDict(TypedDict):


 class InputDatagen(BaseModel):
-    type: Annotated[InputDatagenType, PlainValidator(validate_open_enum(False))]
+    type: InputDatagenType

     samples: List[Sample]

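
Note that the change is not applied uniformly: InputDatagen keeps `type` as a required field (only the open-enum machinery is removed), while InputDatadogAgent above and InputEdgePrometheus and InputElastic below make it optional. A small introspection sketch of that difference, assuming cribl_control_plane.types.BaseModel is a pydantic v2 model as the imports indicate:

    from cribl_control_plane.models.inputdatadogagent import InputDatadogAgent
    from cribl_control_plane.models.inputdatagen import InputDatagen

    # pydantic v2 exposes per-field metadata on model_fields.
    print(InputDatagen.model_fields["type"].is_required())       # True  - still mandatory
    print(InputDatadogAgent.model_fields["type"].is_required())  # False - optional in 0.0.17
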
--- a/cribl_control_plane/models/inputedgeprometheus.py
+++ b/cribl_control_plane/models/inputedgeprometheus.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputEdgePrometheusType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusType(str, Enum):
     EDGE_PROMETHEUS = "edge_prometheus"


@@ -26,14 +23,14 @@ class InputEdgePrometheusConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputEdgePrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputEdgePrometheusPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputEdgePrometheusPqTypedDict(TypedDict):


 class InputEdgePrometheusPq(BaseModel):
-    mode: Annotated[
-        Optional[InputEdgePrometheusMode], PlainValidator(validate_open_enum(False))
-    ] = InputEdgePrometheusMode.ALWAYS
+    mode: Optional[InputEdgePrometheusMode] = InputEdgePrometheusMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,14 +79,13 @@ class InputEdgePrometheusPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputEdgePrometheusPqCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputEdgePrometheusPqCompression.NONE
+    compress: Optional[InputEdgePrometheusPqCompression] = (
+        InputEdgePrometheusPqCompression.NONE
+    )
     r"""Codec to use to compress the persisted data"""


-class InputEdgePrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusDiscoveryType(str, Enum):
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""

     STATIC = "static"
@@ -101,9 +95,7 @@ class InputEdgePrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
     K8S_PODS = "k8s-pods"


-class InputEdgePrometheusPersistenceCompression(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusPersistenceCompression(str, Enum):
     r"""Data compression format. Default is gzip."""

     NONE = "none"
@@ -136,10 +128,9 @@ class InputEdgePrometheusDiskSpooling(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""

-    compress: Annotated[
-        Optional[InputEdgePrometheusPersistenceCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputEdgePrometheusPersistenceCompression.GZIP
+    compress: Optional[InputEdgePrometheusPersistenceCompression] = (
+        InputEdgePrometheusPersistenceCompression.GZIP
+    )
     r"""Data compression format. Default is gzip."""


@@ -156,9 +147,7 @@ class InputEdgePrometheusMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputEdgePrometheusAuthTypeAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusAuthTypeAuthenticationMethod(str, Enum):
     r"""Enter credentials directly, or select a stored secret"""

     MANUAL = "manual"
@@ -166,7 +155,7 @@ class InputEdgePrometheusAuthTypeAuthenticationMethod(
     KUBERNETES = "kubernetes"


-class TargetProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusTargetProtocol(str, Enum):
     r"""Protocol to use when collecting metrics"""

     HTTP = "http"
@@ -176,7 +165,7 @@ class TargetProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
 class TargetTypedDict(TypedDict):
     host: str
     r"""Name of host from which to pull metrics."""
-    protocol: NotRequired[TargetProtocol]
+    protocol: NotRequired[InputEdgePrometheusTargetProtocol]
     r"""Protocol to use when collecting metrics"""
     port: NotRequired[float]
     r"""The port number in the metrics URL for discovered targets."""
@@ -188,9 +177,9 @@ class Target(BaseModel):
     host: str
     r"""Name of host from which to pull metrics."""

-    protocol: Annotated[
-        Optional[TargetProtocol], PlainValidator(validate_open_enum(False))
-    ] = TargetProtocol.HTTP
+    protocol: Optional[InputEdgePrometheusTargetProtocol] = (
+        InputEdgePrometheusTargetProtocol.HTTP
+    )
     r"""Protocol to use when collecting metrics"""

     port: Optional[float] = 9090
@@ -200,7 +189,7 @@ class Target(BaseModel):
     r"""Path to use when collecting metrics from discovered targets"""


-class InputEdgePrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusRecordType(str, Enum):
     r"""DNS Record type to resolve"""

     SRV = "SRV"
@@ -208,7 +197,7 @@ class InputEdgePrometheusRecordType(str, Enum, metaclass=utils.OpenEnumMeta):
     AAAA = "AAAA"


-class ScrapeProtocolProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusScrapeProtocolProtocol(str, Enum):
     r"""Protocol to use when collecting metrics"""

     HTTP = "http"
@@ -230,9 +219,7 @@ class InputEdgePrometheusSearchFilter(BaseModel):
     r"""Search Filter Values, if empty only \"running\" EC2 instances will be returned"""


-class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -240,7 +227,7 @@ class InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod(
     SECRET = "secret"


-class InputEdgePrometheusSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputEdgePrometheusSignatureVersion(str, Enum):
     r"""Signature version to use for signing EC2 requests"""

     V2 = "v2"
@@ -263,9 +250,9 @@ class PodFilter(BaseModel):


 class InputEdgePrometheusTypedDict(TypedDict):
-    type: InputEdgePrometheusType
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputEdgePrometheusType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -299,7 +286,7 @@ class InputEdgePrometheusTypedDict(TypedDict):
     r"""List of DNS names to resolve"""
     record_type: NotRequired[InputEdgePrometheusRecordType]
     r"""DNS Record type to resolve"""
-    scrape_protocol: NotRequired[ScrapeProtocolProtocol]
+    scrape_protocol: NotRequired[InputEdgePrometheusScrapeProtocolProtocol]
     r"""Protocol to use when collecting metrics"""
     scrape_path: NotRequired[str]
     r"""Path to use when collecting metrics from discovered targets"""
@@ -353,11 +340,11 @@ class InputEdgePrometheusTypedDict(TypedDict):


 class InputEdgePrometheus(BaseModel):
-    type: Annotated[InputEdgePrometheusType, PlainValidator(validate_open_enum(False))]
-
     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputEdgePrometheusType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
@@ -388,10 +375,7 @@ class InputEdgePrometheus(BaseModel):
     r"""Other dimensions to include in events"""

     discovery_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusDiscoveryType],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusDiscoveryType],
         pydantic.Field(alias="discoveryType"),
     ] = InputEdgePrometheusDiscoveryType.STATIC
     r"""Target discovery mechanism. Use static to manually enter a list of targets."""
@@ -408,10 +392,7 @@ class InputEdgePrometheus(BaseModel):
     r"""Fields to add to events from this input"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusAuthTypeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusAuthTypeAuthenticationMethod],
         pydantic.Field(alias="authType"),
     ] = InputEdgePrometheusAuthTypeAuthenticationMethod.MANUAL
     r"""Enter credentials directly, or select a stored secret"""
@@ -424,20 +405,14 @@ class InputEdgePrometheus(BaseModel):
     r"""List of DNS names to resolve"""

     record_type: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusRecordType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="recordType"),
+        Optional[InputEdgePrometheusRecordType], pydantic.Field(alias="recordType")
     ] = InputEdgePrometheusRecordType.SRV
     r"""DNS Record type to resolve"""

     scrape_protocol: Annotated[
-        Annotated[
-            Optional[ScrapeProtocolProtocol], PlainValidator(validate_open_enum(False))
-        ],
+        Optional[InputEdgePrometheusScrapeProtocolProtocol],
         pydantic.Field(alias="scrapeProtocol"),
-    ] = ScrapeProtocolProtocol.HTTP
+    ] = InputEdgePrometheusScrapeProtocolProtocol.HTTP
     r"""Protocol to use when collecting metrics"""

     scrape_path: Annotated[Optional[str], pydantic.Field(alias="scrapePath")] = (
@@ -458,10 +433,7 @@ class InputEdgePrometheus(BaseModel):
     r"""EC2 Instance Search Filter"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputEdgePrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -477,10 +449,7 @@ class InputEdgePrometheus(BaseModel):
     r"""EC2 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to EC2-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputEdgePrometheusSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputEdgePrometheusSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputEdgePrometheusSignatureVersion.V4
     r"""Signature version to use for signing EC2 requests"""
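
Besides the enum mechanics, this file also renames two public classes: TargetProtocol becomes InputEdgePrometheusTargetProtocol and ScrapeProtocolProtocol becomes InputEdgePrometheusScrapeProtocolProtocol. That is a breaking change for code importing the old names; whether the old names remain re-exported from models/__init__.py is not verified here. A hedged before/after sketch using the module path shown in the diff:

    # 0.0.15
    # from cribl_control_plane.models.inputedgeprometheus import TargetProtocol
    # protocol = TargetProtocol.HTTP

    # 0.0.17
    from cribl_control_plane.models.inputedgeprometheus import (
        InputEdgePrometheusTargetProtocol,
    )

    protocol = InputEdgePrometheusTargetProtocol.HTTP
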
--- a/cribl_control_plane/models/inputelastic.py
+++ b/cribl_control_plane/models/inputelastic.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputElasticType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticType(str, Enum):
     ELASTIC = "elastic"


@@ -26,14 +23,14 @@ class InputElasticConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputElasticMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputElasticCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputElasticPqTypedDict(TypedDict):


 class InputElasticPq(BaseModel):
-    mode: Annotated[
-        Optional[InputElasticMode], PlainValidator(validate_open_enum(False))
-    ] = InputElasticMode.ALWAYS
+    mode: Optional[InputElasticMode] = InputElasticMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputElasticPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputElasticCompression], PlainValidator(validate_open_enum(False))
-    ] = InputElasticCompression.NONE
+    compress: Optional[InputElasticCompression] = InputElasticCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputElasticMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputElasticMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -156,30 +149,22 @@ class InputElasticTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputElasticMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputElasticMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputElasticMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputElasticMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


-class InputElasticAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAuthenticationType(str, Enum):
     NONE = "none"
     BASIC = "basic"
     CREDENTIALS_SECRET = "credentialsSecret"
     AUTH_TOKENS = "authTokens"


-class InputElasticAPIVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAPIVersion(str, Enum):
     r"""The API version to use for communicating with the server"""

     SIX_DOT_8_DOT_4 = "6.8.4"
@@ -211,7 +196,7 @@ class InputElasticMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputElasticAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputElasticAuthenticationMethod(str, Enum):
     r"""Enter credentials directly, or select a stored secret"""

     NONE = "none"
@@ -255,21 +240,17 @@ class InputElasticProxyMode(BaseModel):
     r"""Amount of time, in seconds, to wait for a proxy request to complete before canceling it"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputElasticAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputElasticAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputElasticAuthenticationMethod.NONE
     r"""Enter credentials directly, or select a stored secret"""


 class InputElasticTypedDict(TypedDict):
-    type: InputElasticType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputElasticType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -331,14 +312,14 @@ class InputElasticTypedDict(TypedDict):


 class InputElastic(BaseModel):
-    type: Annotated[InputElasticType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""

     id: Optional[str] = None
     r"""Unique ID for this input"""

+    type: Optional[InputElasticType] = None
+
     disabled: Optional[bool] = False

     pipeline: Optional[str] = None
@@ -427,18 +408,11 @@ class InputElastic(BaseModel):
     r"""Absolute path on which to listen for Elasticsearch API requests. Defaults to /. _bulk will be appended automatically. For example, /myPath becomes /myPath/_bulk. Requests can then be made to either /myPath/_bulk or /myPath/<myIndexName>/_bulk. Other entries are faked as success."""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputElasticAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputElasticAuthenticationType], pydantic.Field(alias="authType")
     ] = InputElasticAuthenticationType.NONE

     api_version: Annotated[
-        Annotated[
-            Optional[InputElasticAPIVersion], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="apiVersion"),
+        Optional[InputElasticAPIVersion], pydantic.Field(alias="apiVersion")
     ] = InputElasticAPIVersion.EIGHT_DOT_3_DOT_2
     r"""The API version to use for communicating with the server"""
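
The same closed-enum tightening applies to string values arriving through camelCase aliases such as apiVersion. A hedged sketch of validating a raw payload, assuming port is the only required field on InputElastic and that aliases are honored by model_validate as in stock pydantic v2; the claim that 0.0.15 accepted arbitrary version strings is inferred from the removed validate_open_enum(False) validators rather than verified:

    import pydantic

    from cribl_control_plane.models.inputelastic import InputElastic

    # "6.8.4" is a declared InputElasticAPIVersion member, so this validates.
    ok = InputElastic.model_validate({"port": 9200, "apiVersion": "6.8.4"})
    print(ok.api_version)

    try:
        # A string outside the enum now fails fast in 0.0.17.
        InputElastic.model_validate({"port": 9200, "apiVersion": "not-a-real-version"})
    except pydantic.ValidationError as err:
        print(err.error_count(), "validation error(s)")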