cribl-control-plane 0.0.44a2__py3-none-any.whl → 0.0.45__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (158)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +3 -3
  4. cribl_control_plane/models/appmode.py +1 -2
  5. cribl_control_plane/models/cacheconnection.py +2 -10
  6. cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
  7. cribl_control_plane/models/cloudprovider.py +1 -2
  8. cribl_control_plane/models/configgroup.py +2 -7
  9. cribl_control_plane/models/configgroupcloud.py +2 -6
  10. cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
  11. cribl_control_plane/models/cribllakedataset.py +2 -8
  12. cribl_control_plane/models/datasetmetadata.py +2 -8
  13. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
  14. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
  15. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
  16. cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
  17. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
  18. cribl_control_plane/models/getsummaryop.py +2 -7
  19. cribl_control_plane/models/hbcriblinfo.py +6 -6
  20. cribl_control_plane/models/healthstatus.py +4 -7
  21. cribl_control_plane/models/inputappscope.py +14 -34
  22. cribl_control_plane/models/inputazureblob.py +6 -17
  23. cribl_control_plane/models/inputcollection.py +4 -11
  24. cribl_control_plane/models/inputconfluentcloud.py +20 -47
  25. cribl_control_plane/models/inputcribl.py +4 -11
  26. cribl_control_plane/models/inputcriblhttp.py +8 -23
  27. cribl_control_plane/models/inputcribllakehttp.py +10 -22
  28. cribl_control_plane/models/inputcriblmetrics.py +4 -12
  29. cribl_control_plane/models/inputcribltcp.py +8 -23
  30. cribl_control_plane/models/inputcrowdstrike.py +10 -26
  31. cribl_control_plane/models/inputdatadogagent.py +8 -24
  32. cribl_control_plane/models/inputdatagen.py +4 -11
  33. cribl_control_plane/models/inputedgeprometheus.py +24 -58
  34. cribl_control_plane/models/inputelastic.py +14 -40
  35. cribl_control_plane/models/inputeventhub.py +6 -15
  36. cribl_control_plane/models/inputexec.py +6 -14
  37. cribl_control_plane/models/inputfile.py +6 -15
  38. cribl_control_plane/models/inputfirehose.py +8 -23
  39. cribl_control_plane/models/inputgooglepubsub.py +6 -19
  40. cribl_control_plane/models/inputgrafana.py +24 -67
  41. cribl_control_plane/models/inputhttp.py +8 -23
  42. cribl_control_plane/models/inputhttpraw.py +8 -23
  43. cribl_control_plane/models/inputjournalfiles.py +4 -12
  44. cribl_control_plane/models/inputkafka.py +16 -46
  45. cribl_control_plane/models/inputkinesis.py +14 -38
  46. cribl_control_plane/models/inputkubeevents.py +4 -11
  47. cribl_control_plane/models/inputkubelogs.py +8 -16
  48. cribl_control_plane/models/inputkubemetrics.py +8 -16
  49. cribl_control_plane/models/inputloki.py +10 -29
  50. cribl_control_plane/models/inputmetrics.py +8 -23
  51. cribl_control_plane/models/inputmodeldriventelemetry.py +10 -27
  52. cribl_control_plane/models/inputmsk.py +18 -53
  53. cribl_control_plane/models/inputnetflow.py +4 -11
  54. cribl_control_plane/models/inputoffice365mgmt.py +14 -33
  55. cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
  56. cribl_control_plane/models/inputoffice365service.py +16 -35
  57. cribl_control_plane/models/inputopentelemetry.py +16 -38
  58. cribl_control_plane/models/inputprometheus.py +18 -50
  59. cribl_control_plane/models/inputprometheusrw.py +10 -30
  60. cribl_control_plane/models/inputrawudp.py +4 -11
  61. cribl_control_plane/models/inputs3.py +8 -21
  62. cribl_control_plane/models/inputs3inventory.py +10 -26
  63. cribl_control_plane/models/inputsecuritylake.py +10 -27
  64. cribl_control_plane/models/inputsnmp.py +6 -16
  65. cribl_control_plane/models/inputsplunk.py +12 -33
  66. cribl_control_plane/models/inputsplunkhec.py +10 -29
  67. cribl_control_plane/models/inputsplunksearch.py +14 -33
  68. cribl_control_plane/models/inputsqs.py +10 -27
  69. cribl_control_plane/models/inputsyslog.py +16 -43
  70. cribl_control_plane/models/inputsystemmetrics.py +24 -48
  71. cribl_control_plane/models/inputsystemstate.py +8 -16
  72. cribl_control_plane/models/inputtcp.py +10 -29
  73. cribl_control_plane/models/inputtcpjson.py +10 -29
  74. cribl_control_plane/models/inputwef.py +14 -37
  75. cribl_control_plane/models/inputwindowsmetrics.py +24 -44
  76. cribl_control_plane/models/inputwineventlogs.py +10 -20
  77. cribl_control_plane/models/inputwiz.py +8 -21
  78. cribl_control_plane/models/inputwizwebhook.py +8 -23
  79. cribl_control_plane/models/inputzscalerhec.py +10 -29
  80. cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
  81. cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
  82. cribl_control_plane/models/masterworkerentry.py +2 -7
  83. cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
  84. cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
  85. cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
  86. cribl_control_plane/models/nodeupgradestate.py +1 -2
  87. cribl_control_plane/models/nodeupgradestatus.py +5 -13
  88. cribl_control_plane/models/outputazureblob.py +18 -48
  89. cribl_control_plane/models/outputazuredataexplorer.py +28 -73
  90. cribl_control_plane/models/outputazureeventhub.py +18 -40
  91. cribl_control_plane/models/outputazurelogs.py +12 -35
  92. cribl_control_plane/models/outputclickhouse.py +20 -55
  93. cribl_control_plane/models/outputcloudwatch.py +10 -29
  94. cribl_control_plane/models/outputconfluentcloud.py +32 -77
  95. cribl_control_plane/models/outputcriblhttp.py +16 -44
  96. cribl_control_plane/models/outputcribllake.py +16 -46
  97. cribl_control_plane/models/outputcribltcp.py +18 -45
  98. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
  99. cribl_control_plane/models/outputdatadog.py +20 -48
  100. cribl_control_plane/models/outputdataset.py +18 -46
  101. cribl_control_plane/models/outputdiskspool.py +2 -7
  102. cribl_control_plane/models/outputdls3.py +24 -68
  103. cribl_control_plane/models/outputdynatracehttp.py +20 -53
  104. cribl_control_plane/models/outputdynatraceotlp.py +22 -55
  105. cribl_control_plane/models/outputelastic.py +18 -43
  106. cribl_control_plane/models/outputelasticcloud.py +12 -36
  107. cribl_control_plane/models/outputexabeam.py +10 -29
  108. cribl_control_plane/models/outputfilesystem.py +14 -39
  109. cribl_control_plane/models/outputgooglechronicle.py +16 -50
  110. cribl_control_plane/models/outputgooglecloudlogging.py +14 -41
  111. cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
  112. cribl_control_plane/models/outputgooglepubsub.py +10 -31
  113. cribl_control_plane/models/outputgrafanacloud.py +32 -97
  114. cribl_control_plane/models/outputgraphite.py +14 -31
  115. cribl_control_plane/models/outputhoneycomb.py +12 -35
  116. cribl_control_plane/models/outputhumiohec.py +16 -43
  117. cribl_control_plane/models/outputinfluxdb.py +16 -42
  118. cribl_control_plane/models/outputkafka.py +28 -74
  119. cribl_control_plane/models/outputkinesis.py +16 -40
  120. cribl_control_plane/models/outputloki.py +16 -41
  121. cribl_control_plane/models/outputminio.py +24 -65
  122. cribl_control_plane/models/outputmsk.py +30 -82
  123. cribl_control_plane/models/outputnewrelic.py +18 -43
  124. cribl_control_plane/models/outputnewrelicevents.py +14 -41
  125. cribl_control_plane/models/outputopentelemetry.py +26 -67
  126. cribl_control_plane/models/outputprometheus.py +12 -35
  127. cribl_control_plane/models/outputring.py +8 -19
  128. cribl_control_plane/models/outputs3.py +26 -68
  129. cribl_control_plane/models/outputsecuritylake.py +18 -52
  130. cribl_control_plane/models/outputsentinel.py +18 -45
  131. cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
  132. cribl_control_plane/models/outputservicenow.py +24 -60
  133. cribl_control_plane/models/outputsignalfx.py +14 -37
  134. cribl_control_plane/models/outputsns.py +14 -36
  135. cribl_control_plane/models/outputsplunk.py +24 -60
  136. cribl_control_plane/models/outputsplunkhec.py +12 -35
  137. cribl_control_plane/models/outputsplunklb.py +30 -77
  138. cribl_control_plane/models/outputsqs.py +16 -41
  139. cribl_control_plane/models/outputstatsd.py +14 -30
  140. cribl_control_plane/models/outputstatsdext.py +12 -29
  141. cribl_control_plane/models/outputsumologic.py +12 -35
  142. cribl_control_plane/models/outputsyslog.py +24 -58
  143. cribl_control_plane/models/outputtcpjson.py +20 -52
  144. cribl_control_plane/models/outputwavefront.py +12 -35
  145. cribl_control_plane/models/outputwebhook.py +22 -58
  146. cribl_control_plane/models/outputxsiam.py +14 -35
  147. cribl_control_plane/models/productscore.py +1 -2
  148. cribl_control_plane/models/rbacresource.py +1 -2
  149. cribl_control_plane/models/resourcepolicy.py +2 -4
  150. cribl_control_plane/models/runnablejobcollection.py +13 -30
  151. cribl_control_plane/models/runnablejobexecutor.py +4 -13
  152. cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
  153. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
  154. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
  155. cribl_control_plane/models/workertypes.py +1 -2
  156. {cribl_control_plane-0.0.44a2.dist-info → cribl_control_plane-0.0.45.dist-info}/METADATA +1 -1
  157. {cribl_control_plane-0.0.44a2.dist-info → cribl_control_plane-0.0.45.dist-info}/RECORD +158 -158
  158. {cribl_control_plane-0.0.44a2.dist-info → cribl_control_plane-0.0.45.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputsnmp.py +6 -16
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputSnmpConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputSnmpPqTypedDict(TypedDict):


 class InputSnmpPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpMode.ALWAYS
+    mode: Optional[InputSnmpMode] = InputSnmpMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputSnmpPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpCompression.NONE
+    compress: Optional[InputSnmpCompression] = InputSnmpCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputSnmpPq(BaseModel):
     ] = None


-class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class AuthenticationProtocol(str, Enum):
     NONE = "none"
     MD5 = "md5"
     SHA = "sha"
@@ -124,10 +117,7 @@ class V3User(BaseModel):
     name: str

     auth_protocol: Annotated[
-        Annotated[
-            Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="authProtocol"),
+        Optional[AuthenticationProtocol], pydantic.Field(alias="authProtocol")
     ] = AuthenticationProtocol.NONE

     auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
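
The recurring change across these generated models is the removal of Speakeasy's open-enum support: enum classes drop the utils.OpenEnumMeta metaclass and enum-typed fields lose their PlainValidator(validate_open_enum(False)) wrappers, leaving ordinary closed str enums. The sketch below mirrors the new mode field pattern on InputSnmpPq with a standalone pydantic v2 model; it does not import the SDK, and the behavior of the removed open-enum validator is inferred rather than confirmed by this diff.

# Standalone mirror of the 0.0.45 field pattern (pydantic v2 only, no SDK
# import): a closed str Enum on a plain Optional field with an enum default,
# standing in for InputSnmpMode / InputSnmpPq.mode above.
from enum import Enum
from typing import Optional

import pydantic


class Mode(str, Enum):  # stand-in for InputSnmpMode
    SMART = "smart"
    ALWAYS = "always"


class Pq(pydantic.BaseModel):  # stand-in for the mode field on InputSnmpPq
    mode: Optional[Mode] = Mode.ALWAYS


assert Pq.model_validate({"mode": "smart"}).mode is Mode.SMART
assert Pq().mode is Mode.ALWAYS  # default unchanged by the diff

try:
    Pq.model_validate({"mode": "sometimes"})
except pydantic.ValidationError:
    # With a closed enum, unknown values are rejected at validation time;
    # the removed open-enum validator presumably let them pass through.
    print("unknown enum value rejected")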
cribl_control_plane/models/inputsplunk.py +12 -33
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputSplunkConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputSplunkPqTypedDict(TypedDict):


 class InputSplunkPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkMode.ALWAYS
+    mode: Optional[InputSplunkMode] = InputSplunkMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputSplunkPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkPqCompression.NONE
+    compress: Optional[InputSplunkPqCompression] = InputSplunkPqCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputSplunkPq(BaseModel):
     ] = None


-class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -169,19 +162,11 @@ class InputSplunkTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


@@ -211,14 +196,14 @@ class InputSplunkAuthToken(BaseModel):
     description: Optional[str] = None


-class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaxS2SVersion(str, Enum):
     r"""The highest S2S protocol version to advertise during handshake"""

     V3 = "v3"
     V4 = "v4"


-class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkCompression(str, Enum):
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""

     DISABLED = "disabled"
@@ -369,11 +354,7 @@ class InputSplunk(BaseModel):
     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""

     max_s2_sversion: Annotated[
-        Annotated[
-            Optional[InputSplunkMaxS2SVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxS2Sversion"),
+        Optional[InputSplunkMaxS2SVersion], pydantic.Field(alias="maxS2Sversion")
     ] = InputSplunkMaxS2SVersion.V3
     r"""The highest S2S protocol version to advertise during handshake"""

@@ -394,7 +375,5 @@ class InputSplunk(BaseModel):
     ] = False
     r"""Extract and process Splunk-generated metrics as Cribl metrics"""

-    compress: Annotated[
-        Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkCompression.DISABLED
+    compress: Optional[InputSplunkCompression] = InputSplunkCompression.DISABLED
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
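
The TLS version enums above (InputSplunkMinimumTLSVersion, InputSplunkMaximumTLSVersion) are now plain closed enums, so unknown protocol strings fail ordinary Enum lookup. A minimal sketch, assuming cribl-control-plane 0.0.45 is installed and that the import path follows the module shown in the file list:

# Assumes: pip install cribl-control-plane==0.0.45; import path taken from
# cribl_control_plane/models/inputsplunk.py in the file list above.
from cribl_control_plane.models.inputsplunk import InputSplunkMinimumTLSVersion

# Known members resolve by value, as with any str Enum.
assert InputSplunkMinimumTLSVersion("TLSv1.2") is InputSplunkMinimumTLSVersion.TL_SV1_2

# Values outside the generated members now raise ValueError; the removed
# OpenEnumMeta metaclass presumably tolerated them.
try:
    InputSplunkMinimumTLSVersion("TLSv1.4")
except ValueError:
    print("TLSv1.4 is not a member of the closed enum")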
cribl_control_plane/models/inputsplunkhec.py +10 -29
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputSplunkHecConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputSplunkHecPqTypedDict(TypedDict):


 class InputSplunkHecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkHecMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecMode.ALWAYS
+    mode: Optional[InputSplunkHecMode] = InputSplunkHecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputSplunkHecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkHecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecCompression.NONE
+    compress: Optional[InputSplunkHecCompression] = InputSplunkHecCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,7 +96,7 @@ class InputSplunkHecPq(BaseModel):
     ] = None


-class InputSplunkHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

     MANUAL = "manual"
@@ -141,11 +134,7 @@ class InputSplunkHecAuthToken(BaseModel):
     token: Any

     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkHecAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkHecAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputSplunkHecAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""

@@ -165,14 +154,14 @@ class InputSplunkHecAuthToken(BaseModel):
     r"""Fields to add to events referencing this token"""


-class InputSplunkHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSplunkHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -231,19 +220,11 @@ class InputSplunkHecTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None

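
In the flattened form used throughout 0.0.45, the alias and the default stay on a single Annotated field; only the nested open-enum validator disappears. A standalone sketch of the authType pattern on InputSplunkHecAuthToken (the SECRET member is named in the docstring above but its value is assumed; the SDK class itself is not imported):

# Standalone mirror of the flattened 0.0.45 pattern:
#   auth_type: Annotated[Optional[Enum], pydantic.Field(alias="authType")] = MANUAL
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class AuthMethod(str, Enum):  # stand-in for InputSplunkHecAuthenticationMethod
    MANUAL = "manual"
    SECRET = "secret"  # named in the docstring above; value assumed


class AuthToken(pydantic.BaseModel):  # stand-in for InputSplunkHecAuthToken.auth_type
    auth_type: Annotated[
        Optional[AuthMethod], pydantic.Field(alias="authType")
    ] = AuthMethod.MANUAL


tok = AuthToken.model_validate({"authType": "secret"})
assert tok.auth_type is AuthMethod.SECRET

# Serializing by alias in JSON mode reproduces the camelCase wire shape.
print(tok.model_dump(by_alias=True, mode="json"))  # {'authType': 'secret'}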
cribl_control_plane/models/inputsplunksearch.py +14 -33
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputSplunkSearchConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkSearchMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkSearchCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputSplunkSearchPqTypedDict(TypedDict):


 class InputSplunkSearchPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkSearchMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchMode.ALWAYS
+    mode: Optional[InputSplunkSearchMode] = InputSplunkSearchMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,10 +88,7 @@ class InputSplunkSearchPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkSearchCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSplunkSearchCompression.NONE
+    compress: Optional[InputSplunkSearchCompression] = InputSplunkSearchCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -104,7 +96,7 @@ class InputSplunkSearchPq(BaseModel):
     ] = None


-class OutputMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMode(str, Enum):
     r"""Format of the returned output"""

     CSV = "csv"
@@ -137,7 +129,7 @@ class EndpointHeader(BaseModel):
     r"""JavaScript expression to compute the header's value, normally enclosed in backticks (e.g., `${earliest}`). If a constant, use single quotes (e.g., 'earliest'). Values without delimiters (e.g., earliest) are evaluated as strings."""


-class InputSplunkSearchLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchLogLevel(str, Enum):
     r"""Collector runtime log level (verbosity)"""

     ERROR = "error"
@@ -159,7 +151,7 @@ class InputSplunkSearchMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputSplunkSearchRetryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchRetryType(str, Enum):
     r"""The algorithm to use when performing HTTP retries"""

     NONE = "none"
@@ -187,9 +179,7 @@ class InputSplunkSearchRetryRulesTypedDict(TypedDict):


 class InputSplunkSearchRetryRules(BaseModel):
-    type: Annotated[
-        Optional[InputSplunkSearchRetryType], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchRetryType.BACKOFF
+    type: Optional[InputSplunkSearchRetryType] = InputSplunkSearchRetryType.BACKOFF
     r"""The algorithm to use when performing HTTP retries"""

     interval: Optional[float] = 1000
@@ -220,7 +210,7 @@ class InputSplunkSearchRetryRules(BaseModel):
     r"""Retry request when a connection reset (ECONNRESET) error occurs"""


-class InputSplunkSearchAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchAuthenticationType(str, Enum):
     r"""Splunk Search authentication type"""

     NONE = "none"
@@ -405,10 +395,9 @@ class InputSplunkSearch(BaseModel):
     endpoint: Optional[str] = "/services/search/v2/jobs/export"
     r"""REST API used to create a search"""

-    output_mode: Annotated[
-        Annotated[Optional[OutputMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="outputMode"),
-    ] = OutputMode.JSON
+    output_mode: Annotated[Optional[OutputMode], pydantic.Field(alias="outputMode")] = (
+        OutputMode.JSON
+    )
     r"""Format of the returned output"""

     endpoint_params: Annotated[
@@ -422,11 +411,7 @@ class InputSplunkSearch(BaseModel):
     r"""Optional request headers to send to the endpoint"""

     log_level: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchLogLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="logLevel"),
+        Optional[InputSplunkSearchLogLevel], pydantic.Field(alias="logLevel")
     ] = None
     r"""Collector runtime log level (verbosity)"""

@@ -487,11 +472,7 @@ class InputSplunkSearch(BaseModel):
     r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""

     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkSearchAuthenticationType], pydantic.Field(alias="authType")
     ] = InputSplunkSearchAuthenticationType.BASIC
     r"""Splunk Search authentication type"""

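
Because fields such as type on InputSplunkSearchRetryRules now validate against closed enums, a payload carrying a value outside the generated members fails validation instead of slipping through as a raw string. A hedged sketch of surfacing such failures, assuming the package is installed and that the retry-rules model has no required fields beyond those shown in the hunks above (an assumption; any missing-field errors would simply be reported alongside the enum error):

# Assumes: pip install cribl-control-plane==0.0.45; import path follows
# cribl_control_plane/models/inputsplunksearch.py above. The payload is
# illustrative only ("fibonacci" is a made-up retry type).
import pydantic

from cribl_control_plane.models.inputsplunksearch import InputSplunkSearchRetryRules

payload = {"type": "fibonacci", "interval": 1000}

try:
    rules = InputSplunkSearchRetryRules.model_validate(payload)
except pydantic.ValidationError as err:
    # Report which fields failed, e.g. ('type',) with an enum membership error.
    for issue in err.errors():
        print(issue["loc"], "->", issue["msg"])
else:
    print(rules)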
cribl_control_plane/models/inputsqs.py +10 -27
@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -26,14 +23,14 @@ class InputSqsConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSqsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSqsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -67,9 +64,7 @@ class InputSqsPqTypedDict(TypedDict):


 class InputSqsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSqsMode], PlainValidator(validate_open_enum(False))
-    ] = InputSqsMode.ALWAYS
+    mode: Optional[InputSqsMode] = InputSqsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -93,9 +88,7 @@ class InputSqsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSqsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSqsCompression.NONE
+    compress: Optional[InputSqsCompression] = InputSqsCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_controls: Annotated[
@@ -103,14 +96,14 @@ class InputSqsPq(BaseModel):
     ] = None


-class InputSqsQueueType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsQueueType(str, Enum):
     r"""The queue type used (or created)"""

     STANDARD = "standard"
     FIFO = "fifo"


-class InputSqsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -118,7 +111,7 @@ class InputSqsAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class InputSqsSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSqsSignatureVersion(str, Enum):
     r"""Signature version to use for signing SQS requests"""

     V2 = "v2"
@@ -235,10 +228,7 @@ class InputSqs(BaseModel):
     pq: Optional[InputSqsPq] = None

     queue_type: Annotated[
-        Annotated[
-            Optional[InputSqsQueueType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="queueType"),
+        Optional[InputSqsQueueType], pydantic.Field(alias="queueType")
     ] = InputSqsQueueType.STANDARD
     r"""The queue type used (or created)"""

@@ -251,10 +241,7 @@ class InputSqs(BaseModel):
     r"""Create queue if it does not exist"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputSqsAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSqsAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputSqsAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -270,11 +257,7 @@ class InputSqs(BaseModel):
     r"""SQS service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to SQS-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputSqsSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputSqsSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputSqsSignatureVersion.V4
     r"""Signature version to use for signing SQS requests"""

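
With InputSqsQueueType now a closed enum (standard and fifo only), callers that may have relied on unknown values passing through can add an explicit fallback before building a source config. A small helper sketch, assuming the package is installed and using the import path implied by the file list:

# Assumes: pip install cribl-control-plane==0.0.45; import path follows
# cribl_control_plane/models/inputsqs.py above.
from cribl_control_plane.models.inputsqs import InputSqsQueueType


def coerce_queue_type(raw: str) -> InputSqsQueueType:
    """Map a raw string to the closed enum, falling back to STANDARD."""
    try:
        return InputSqsQueueType(raw)
    except ValueError:
        return InputSqsQueueType.STANDARD


assert coerce_queue_type("fifo") is InputSqsQueueType.FIFO
assert coerce_queue_type("priority") is InputSqsQueueType.STANDARD  # unknown input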