cribl-control-plane 0.0.16__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (133)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +4124 -4124
  4. cribl_control_plane/models/createinputop.py +1734 -2771
  5. cribl_control_plane/models/createoutputop.py +2153 -4314
  6. cribl_control_plane/models/healthstatus.py +4 -7
  7. cribl_control_plane/models/inputappscope.py +16 -36
  8. cribl_control_plane/models/inputazureblob.py +8 -19
  9. cribl_control_plane/models/inputcollection.py +6 -15
  10. cribl_control_plane/models/inputconfluentcloud.py +20 -45
  11. cribl_control_plane/models/inputcribl.py +6 -13
  12. cribl_control_plane/models/inputcriblhttp.py +10 -27
  13. cribl_control_plane/models/inputcribllakehttp.py +12 -26
  14. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  15. cribl_control_plane/models/inputcribltcp.py +10 -27
  16. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  17. cribl_control_plane/models/inputdatadogagent.py +10 -28
  18. cribl_control_plane/models/inputdatagen.py +6 -13
  19. cribl_control_plane/models/inputedgeprometheus.py +31 -64
  20. cribl_control_plane/models/inputelastic.py +16 -44
  21. cribl_control_plane/models/inputeventhub.py +8 -19
  22. cribl_control_plane/models/inputexec.py +8 -16
  23. cribl_control_plane/models/inputfile.py +8 -17
  24. cribl_control_plane/models/inputfirehose.py +10 -27
  25. cribl_control_plane/models/inputgooglepubsub.py +8 -23
  26. cribl_control_plane/models/inputgrafana_union.py +35 -81
  27. cribl_control_plane/models/inputhttp.py +10 -27
  28. cribl_control_plane/models/inputhttpraw.py +10 -27
  29. cribl_control_plane/models/inputjournalfiles.py +6 -16
  30. cribl_control_plane/models/inputkafka.py +16 -45
  31. cribl_control_plane/models/inputkinesis.py +16 -42
  32. cribl_control_plane/models/inputkubeevents.py +6 -13
  33. cribl_control_plane/models/inputkubelogs.py +10 -18
  34. cribl_control_plane/models/inputkubemetrics.py +10 -18
  35. cribl_control_plane/models/inputloki.py +12 -33
  36. cribl_control_plane/models/inputmetrics.py +10 -25
  37. cribl_control_plane/models/inputmodeldriventelemetry.py +12 -32
  38. cribl_control_plane/models/inputmsk.py +18 -52
  39. cribl_control_plane/models/inputnetflow.py +6 -15
  40. cribl_control_plane/models/inputoffice365mgmt.py +16 -37
  41. cribl_control_plane/models/inputoffice365msgtrace.py +18 -39
  42. cribl_control_plane/models/inputoffice365service.py +18 -39
  43. cribl_control_plane/models/inputopentelemetry.py +18 -42
  44. cribl_control_plane/models/inputprometheus.py +20 -54
  45. cribl_control_plane/models/inputprometheusrw.py +12 -34
  46. cribl_control_plane/models/inputrawudp.py +6 -15
  47. cribl_control_plane/models/inputs3.py +10 -23
  48. cribl_control_plane/models/inputs3inventory.py +12 -28
  49. cribl_control_plane/models/inputsecuritylake.py +12 -29
  50. cribl_control_plane/models/inputsnmp.py +8 -20
  51. cribl_control_plane/models/inputsplunk.py +14 -37
  52. cribl_control_plane/models/inputsplunkhec.py +12 -33
  53. cribl_control_plane/models/inputsplunksearch.py +16 -37
  54. cribl_control_plane/models/inputsqs.py +12 -31
  55. cribl_control_plane/models/inputsyslog_union.py +29 -53
  56. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  57. cribl_control_plane/models/inputsystemstate.py +10 -18
  58. cribl_control_plane/models/inputtcp.py +12 -33
  59. cribl_control_plane/models/inputtcpjson.py +12 -33
  60. cribl_control_plane/models/inputwef.py +20 -45
  61. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  62. cribl_control_plane/models/inputwineventlogs.py +12 -22
  63. cribl_control_plane/models/inputwiz.py +10 -25
  64. cribl_control_plane/models/inputzscalerhec.py +12 -33
  65. cribl_control_plane/models/output.py +3 -6
  66. cribl_control_plane/models/outputazureblob.py +20 -52
  67. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  68. cribl_control_plane/models/outputazureeventhub.py +20 -44
  69. cribl_control_plane/models/outputazurelogs.py +14 -37
  70. cribl_control_plane/models/outputclickhouse.py +22 -59
  71. cribl_control_plane/models/outputcloudwatch.py +12 -33
  72. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  73. cribl_control_plane/models/outputcriblhttp.py +18 -46
  74. cribl_control_plane/models/outputcribllake.py +18 -48
  75. cribl_control_plane/models/outputcribltcp.py +20 -47
  76. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  77. cribl_control_plane/models/outputdatadog.py +22 -50
  78. cribl_control_plane/models/outputdataset.py +20 -48
  79. cribl_control_plane/models/outputdefault.py +2 -5
  80. cribl_control_plane/models/outputdevnull.py +2 -5
  81. cribl_control_plane/models/outputdiskspool.py +4 -9
  82. cribl_control_plane/models/outputdls3.py +26 -72
  83. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  84. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  85. cribl_control_plane/models/outputelastic.py +20 -45
  86. cribl_control_plane/models/outputelasticcloud.py +14 -40
  87. cribl_control_plane/models/outputexabeam.py +12 -33
  88. cribl_control_plane/models/outputfilesystem.py +16 -41
  89. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  90. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  91. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  92. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  93. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  94. cribl_control_plane/models/outputgraphite.py +16 -35
  95. cribl_control_plane/models/outputhoneycomb.py +14 -37
  96. cribl_control_plane/models/outputhumiohec.py +18 -47
  97. cribl_control_plane/models/outputinfluxdb.py +18 -44
  98. cribl_control_plane/models/outputkafka.py +28 -73
  99. cribl_control_plane/models/outputkinesis.py +18 -44
  100. cribl_control_plane/models/outputloki.py +18 -43
  101. cribl_control_plane/models/outputminio.py +26 -69
  102. cribl_control_plane/models/outputmsk.py +30 -81
  103. cribl_control_plane/models/outputnetflow.py +2 -5
  104. cribl_control_plane/models/outputnewrelic.py +20 -45
  105. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  106. cribl_control_plane/models/outputopentelemetry.py +28 -69
  107. cribl_control_plane/models/outputprometheus.py +14 -37
  108. cribl_control_plane/models/outputring.py +10 -21
  109. cribl_control_plane/models/outputrouter.py +2 -5
  110. cribl_control_plane/models/outputs3.py +28 -72
  111. cribl_control_plane/models/outputsecuritylake.py +20 -56
  112. cribl_control_plane/models/outputsentinel.py +20 -49
  113. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  114. cribl_control_plane/models/outputservicenow.py +26 -64
  115. cribl_control_plane/models/outputsignalfx.py +16 -39
  116. cribl_control_plane/models/outputsnmp.py +2 -5
  117. cribl_control_plane/models/outputsns.py +16 -40
  118. cribl_control_plane/models/outputsplunk.py +26 -64
  119. cribl_control_plane/models/outputsplunkhec.py +14 -37
  120. cribl_control_plane/models/outputsplunklb.py +36 -83
  121. cribl_control_plane/models/outputsqs.py +18 -45
  122. cribl_control_plane/models/outputstatsd.py +16 -34
  123. cribl_control_plane/models/outputstatsdext.py +14 -33
  124. cribl_control_plane/models/outputsumologic.py +14 -37
  125. cribl_control_plane/models/outputsyslog.py +26 -60
  126. cribl_control_plane/models/outputtcpjson.py +22 -54
  127. cribl_control_plane/models/outputwavefront.py +14 -37
  128. cribl_control_plane/models/outputwebhook.py +24 -60
  129. cribl_control_plane/models/outputxsiam.py +16 -37
  130. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +1 -1
  131. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  132. cribl_control_plane-0.0.16.dist-info/RECORD +0 -215
  133. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
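The hunks reproduced below (inputsecuritylake, inputsnmp, inputsplunk, inputsplunkhec, and inputsplunksearch) all show the same recurring change in the generated models: enum classes drop metaclass=utils.OpenEnumMeta, and enum-typed fields drop the PlainValidator(validate_open_enum(False)) wrapper, leaving plain closed str/Enum types that Pydantic validates directly. The sketch below illustrates the practical difference this makes for callers; it is not code from the package, and the Compression enum and lenient helper are illustrative stand-ins for the removed open-enum pattern.

from enum import Enum
from typing import Union

from pydantic import BaseModel, ValidationError
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def lenient(value: object) -> Union[Compression, str]:
    # Illustrative stand-in for the removed validate_open_enum helper:
    # unknown values fall back to the raw string instead of failing.
    try:
        return Compression(value)
    except ValueError:
        return str(value)


class ClosedField(BaseModel):
    # 0.0.17 style: a plain closed enum, validated by Pydantic itself
    compress: Compression = Compression.NONE


class OpenField(BaseModel):
    # 0.0.16 style (sketched): a PlainValidator tolerates unknown values
    compress: Annotated[Union[Compression, str], PlainValidator(lenient)] = (
        Compression.NONE
    )


print(OpenField(compress="zstd").compress)  # accepted and kept as the raw string "zstd"
try:
    ClosedField(compress="zstd")
except ValidationError:
    print("rejected: 'zstd' is not a Compression member")

Under the 0.0.16 open-enum pattern, a value outside the declared members was accepted and carried through; with the closed enums in 0.0.17, the same value raises a ValidationError when the model is constructed, which likely matters when an API response contains an enum value the SDK does not yet declare.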
cribl_control_plane/models/inputsecuritylake.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSecurityLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeType(str, Enum):
     SECURITY_LAKE = "security_lake"
 
 
@@ -26,14 +23,14 @@ class InputSecurityLakeConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSecurityLakeMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSecurityLakeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSecurityLakePqTypedDict(TypedDict):
 
 
 class InputSecurityLakePq(BaseModel):
-    mode: Annotated[
-        Optional[InputSecurityLakeMode], PlainValidator(validate_open_enum(False))
-    ] = InputSecurityLakeMode.ALWAYS
+    mode: Optional[InputSecurityLakeMode] = InputSecurityLakeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,14 +79,11 @@ class InputSecurityLakePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSecurityLakeCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSecurityLakeCompression.NONE
+    compress: Optional[InputSecurityLakeCompression] = InputSecurityLakeCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -99,7 +91,7 @@ class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumM
     SECRET = "secret"
 
 
-class InputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeSignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""
 
     V2 = "v2"
@@ -152,7 +144,7 @@ class InputSecurityLakeCheckpointing(BaseModel):
     r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""
 
 
-class InputSecurityLakeTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeTagAfterProcessing(str, Enum):
     FALSE = "false"
     TRUE = "true"
 
@@ -242,7 +234,7 @@ class InputSecurityLakeTypedDict(TypedDict):
 
 
 class InputSecurityLake(BaseModel):
-    type: Annotated[InputSecurityLakeType, PlainValidator(validate_open_enum(False))]
+    type: InputSecurityLakeType
 
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -283,10 +275,7 @@ class InputSecurityLake(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputSecurityLakeAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -302,10 +291,7 @@ class InputSecurityLake(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputSecurityLakeSignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -407,10 +393,7 @@ class InputSecurityLake(BaseModel):
     r"""Select or create a stored secret that references your access key and secret key"""
 
     tag_after_processing: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeTagAfterProcessing],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeTagAfterProcessing],
        pydantic.Field(alias="tagAfterProcessing"),
     ] = None
 
cribl_control_plane/models/inputsnmp.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSnmpType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpType(str, Enum):
     SNMP = "snmp"
 
 
@@ -26,14 +23,14 @@ class InputSnmpConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSnmpPqTypedDict(TypedDict):
 
 
 class InputSnmpPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpMode.ALWAYS
+    mode: Optional[InputSnmpMode] = InputSnmpMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputSnmpPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpCompression.NONE
+    compress: Optional[InputSnmpCompression] = InputSnmpCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class AuthenticationProtocol(str, Enum):
     NONE = "none"
     MD5 = "md5"
     SHA = "sha"
@@ -111,10 +104,7 @@ class V3User(BaseModel):
     name: str
 
     auth_protocol: Annotated[
-        Annotated[
-            Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="authProtocol"),
+        Optional[AuthenticationProtocol], pydantic.Field(alias="authProtocol")
     ] = AuthenticationProtocol.NONE
 
     auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
@@ -206,9 +196,7 @@ class InputSnmp(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputSnmpType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSnmpType] = None
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/inputsplunk.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSplunkType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkType(str, Enum):
     SPLUNK = "splunk"
 
 
@@ -26,14 +23,14 @@ class InputSplunkConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkPqTypedDict(TypedDict):
 
 
 class InputSplunkPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkMode.ALWAYS
+    mode: Optional[InputSplunkMode] = InputSplunkMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputSplunkPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkPqCompression.NONE
+    compress: Optional[InputSplunkPqCompression] = InputSplunkPqCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -156,19 +149,11 @@ class InputSplunkTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -198,14 +183,14 @@ class InputSplunkAuthToken(BaseModel):
     description: Optional[str] = None
 
 
-class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaxS2SVersion(str, Enum):
     r"""The highest S2S protocol version to advertise during handshake"""
 
     V3 = "v3"
     V4 = "v4"
 
 
-class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkCompression(str, Enum):
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
 
     DISABLED = "disabled"
@@ -276,9 +261,7 @@ class InputSplunk(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputSplunkType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSplunkType] = None
 
     disabled: Optional[bool] = False
 
@@ -358,11 +341,7 @@ class InputSplunk(BaseModel):
     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
 
     max_s2_sversion: Annotated[
-        Annotated[
-            Optional[InputSplunkMaxS2SVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxS2Sversion"),
+        Optional[InputSplunkMaxS2SVersion], pydantic.Field(alias="maxS2Sversion")
     ] = InputSplunkMaxS2SVersion.V3
     r"""The highest S2S protocol version to advertise during handshake"""
 
@@ -383,7 +362,5 @@ class InputSplunk(BaseModel):
     ] = False
     r"""Extract and process Splunk-generated metrics as Cribl metrics"""
 
-    compress: Annotated[
-        Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkCompression.DISABLED
+    compress: Optional[InputSplunkCompression] = InputSplunkCompression.DISABLED
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
cribl_control_plane/models/inputsplunkhec.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSplunkHecType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecType(str, Enum):
     SPLUNK_HEC = "splunk_hec"
 
 
@@ -26,14 +23,14 @@ class InputSplunkHecConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSplunkHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSplunkHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkHecPqTypedDict(TypedDict):
 
 
 class InputSplunkHecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkHecMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecMode.ALWAYS
+    mode: Optional[InputSplunkHecMode] = InputSplunkHecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputSplunkHecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSplunkHecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkHecCompression.NONE
+    compress: Optional[InputSplunkHecCompression] = InputSplunkHecCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputSplunkHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
     MANUAL = "manual"
@@ -128,11 +121,7 @@ class InputSplunkHecAuthToken(BaseModel):
     token: Any
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkHecAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkHecAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputSplunkHecAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
@@ -152,14 +141,14 @@ class InputSplunkHecAuthToken(BaseModel):
     r"""Fields to add to events referencing this token"""
 
 
-class InputSplunkHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputSplunkHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkHecMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -218,19 +207,11 @@ class InputSplunkHecTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkHecMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -327,9 +308,7 @@ class InputSplunkHec(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputSplunkHecType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSplunkHecType] = None
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/inputsplunksearch.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSplunkSearchType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchType(str, Enum):
     SPLUNK_SEARCH = "splunk_search"
 
 
@@ -26,14 +23,14 @@ class InputSplunkSearchConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSplunkSearchMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSplunkSearchCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkSearchPqTypedDict(TypedDict):
 
 
 class InputSplunkSearchPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkSearchMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchMode.ALWAYS
+    mode: Optional[InputSplunkSearchMode] = InputSplunkSearchMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,14 +79,11 @@ class InputSplunkSearchPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSplunkSearchCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSplunkSearchCompression.NONE
+    compress: Optional[InputSplunkSearchCompression] = InputSplunkSearchCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class OutputMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMode(str, Enum):
     r"""Format of the returned output"""
 
     CSV = "csv"
@@ -124,7 +116,7 @@ class EndpointHeader(BaseModel):
     r"""JavaScript expression to compute the header's value, normally enclosed in backticks (e.g., `${earliest}`). If a constant, use single quotes (e.g., 'earliest'). Values without delimiters (e.g., earliest) are evaluated as strings."""
 
 
-class InputSplunkSearchLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchLogLevel(str, Enum):
     r"""Collector runtime log level (verbosity)"""
 
     ERROR = "error"
@@ -146,7 +138,7 @@ class InputSplunkSearchMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputSplunkSearchRetryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchRetryType(str, Enum):
     r"""The algorithm to use when performing HTTP retries"""
 
     NONE = "none"
@@ -174,9 +166,7 @@ class InputSplunkSearchRetryRulesTypedDict(TypedDict):
 
 
 class InputSplunkSearchRetryRules(BaseModel):
-    type: Annotated[
-        Optional[InputSplunkSearchRetryType], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkSearchRetryType.BACKOFF
+    type: Optional[InputSplunkSearchRetryType] = InputSplunkSearchRetryType.BACKOFF
     r"""The algorithm to use when performing HTTP retries"""
 
     interval: Optional[float] = 1000
@@ -207,7 +197,7 @@ class InputSplunkSearchRetryRules(BaseModel):
     r"""Retry request when a connection reset (ECONNRESET) error occurs"""
 
 
-class InputSplunkSearchAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkSearchAuthenticationType(str, Enum):
     r"""Splunk Search authentication type"""
 
     NONE = "none"
@@ -347,9 +337,7 @@ class InputSplunkSearch(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputSplunkSearchType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSplunkSearchType] = None
 
     disabled: Optional[bool] = False
 
@@ -394,10 +382,9 @@ class InputSplunkSearch(BaseModel):
     endpoint: Optional[str] = "/services/search/v2/jobs/export"
     r"""REST API used to create a search"""
 
-    output_mode: Annotated[
-        Annotated[Optional[OutputMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="outputMode"),
-    ] = OutputMode.JSON
+    output_mode: Annotated[Optional[OutputMode], pydantic.Field(alias="outputMode")] = (
+        OutputMode.JSON
+    )
     r"""Format of the returned output"""
 
     endpoint_params: Annotated[
@@ -411,11 +398,7 @@ class InputSplunkSearch(BaseModel):
     r"""Optional request headers to send to the endpoint"""
 
     log_level: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchLogLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="logLevel"),
+        Optional[InputSplunkSearchLogLevel], pydantic.Field(alias="logLevel")
     ] = None
     r"""Collector runtime log level (verbosity)"""
 
@@ -476,11 +459,7 @@ class InputSplunkSearch(BaseModel):
     r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputSplunkSearchAuthenticationType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputSplunkSearchAuthenticationType], pydantic.Field(alias="authType")
     ] = InputSplunkSearchAuthenticationType.BASIC
     r"""Splunk Search authentication type"""