cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (144)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
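
Note: file 2 above renames cribl_control_plane/outputs.py to cribl_control_plane/destinations.py (and file 101 renames outputgrafanacloud_union.py to outputgrafanacloud.py), so imports of the old module path will fail after the upgrade. A minimal sketch of a version-tolerant import, assuming 0.0.17 ships no outputs compatibility shim:

    # Sketch only: prefer the 0.0.17 module name, fall back to the 0.0.15 one.
    try:
        from cribl_control_plane import destinations as destinations_module  # 0.0.17
    except ImportError:
        from cribl_control_plane import outputs as destinations_module  # 0.0.15
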
cribl_control_plane/models/inputs3.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputS3Type(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Type(str, Enum):
     S3 = "s3"
 
 
@@ -26,14 +23,14 @@ class InputS3Connection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputS3Mode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Mode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Compression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputS3PqTypedDict(TypedDict):
 
 
 class InputS3Pq(BaseModel):
-    mode: Annotated[
-        Optional[InputS3Mode], PlainValidator(validate_open_enum(False))
-    ] = InputS3Mode.ALWAYS
+    mode: Optional[InputS3Mode] = InputS3Mode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputS3Pq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputS3Compression], PlainValidator(validate_open_enum(False))
-    ] = InputS3Compression.NONE
+    compress: Optional[InputS3Compression] = InputS3Compression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3AuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -98,7 +91,7 @@ class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"
 
 
-class InputS3SignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3SignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""
 
     V2 = "v2"
@@ -237,7 +230,7 @@ class InputS3TypedDict(TypedDict):
 
 
 class InputS3(BaseModel):
-    type: Annotated[InputS3Type, PlainValidator(validate_open_enum(False))]
+    type: InputS3Type
 
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -278,10 +271,7 @@ class InputS3(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputS3AuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3AuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3AuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -297,10 +287,7 @@ class InputS3(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputS3SignatureVersion], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputS3SignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputS3SignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
 

cribl_control_plane/models/inputs3inventory.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputS3InventoryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryType(str, Enum):
     S3_INVENTORY = "s3_inventory"
 
 
@@ -26,14 +23,14 @@ class InputS3InventoryConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputS3InventoryMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputS3InventoryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputS3InventoryPqTypedDict(TypedDict):
 
 
 class InputS3InventoryPq(BaseModel):
-    mode: Annotated[
-        Optional[InputS3InventoryMode], PlainValidator(validate_open_enum(False))
-    ] = InputS3InventoryMode.ALWAYS
+    mode: Optional[InputS3InventoryMode] = InputS3InventoryMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputS3InventoryPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputS3InventoryCompression], PlainValidator(validate_open_enum(False))
-    ] = InputS3InventoryCompression.NONE
+    compress: Optional[InputS3InventoryCompression] = InputS3InventoryCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -98,7 +91,7 @@ class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMe
     SECRET = "secret"
 
 
-class InputS3InventorySignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventorySignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""
 
     V2 = "v2"
@@ -151,7 +144,7 @@ class InputS3InventoryCheckpointing(BaseModel):
     r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""
 
 
-class InputS3InventoryTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryTagAfterProcessing(str, Enum):
     FALSE = "false"
     TRUE = "true"
 
@@ -245,7 +238,7 @@ class InputS3InventoryTypedDict(TypedDict):
 
 
 class InputS3Inventory(BaseModel):
-    type: Annotated[InputS3InventoryType, PlainValidator(validate_open_enum(False))]
+    type: InputS3InventoryType
 
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -286,10 +279,7 @@ class InputS3Inventory(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputS3InventoryAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventoryAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3InventoryAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -305,10 +295,7 @@ class InputS3Inventory(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputS3InventorySignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventorySignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputS3InventorySignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -422,10 +409,7 @@ class InputS3Inventory(BaseModel):
     r"""Select or create a stored secret that references your access key and secret key"""
 
     tag_after_processing: Annotated[
-        Annotated[
-            Optional[InputS3InventoryTagAfterProcessing],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventoryTagAfterProcessing],
         pydantic.Field(alias="tagAfterProcessing"),
     ] = None
 

cribl_control_plane/models/inputsecuritylake.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSecurityLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeType(str, Enum):
     SECURITY_LAKE = "security_lake"
 
 
@@ -26,14 +23,14 @@ class InputSecurityLakeConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSecurityLakeMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSecurityLakeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSecurityLakePqTypedDict(TypedDict):
 
 
 class InputSecurityLakePq(BaseModel):
-    mode: Annotated[
-        Optional[InputSecurityLakeMode], PlainValidator(validate_open_enum(False))
-    ] = InputSecurityLakeMode.ALWAYS
+    mode: Optional[InputSecurityLakeMode] = InputSecurityLakeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,14 +79,11 @@ class InputSecurityLakePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSecurityLakeCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSecurityLakeCompression.NONE
+    compress: Optional[InputSecurityLakeCompression] = InputSecurityLakeCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -99,7 +91,7 @@ class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumM
     SECRET = "secret"
 
 
-class InputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeSignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""
 
     V2 = "v2"
@@ -152,7 +144,7 @@ class InputSecurityLakeCheckpointing(BaseModel):
     r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""
 
 
-class InputSecurityLakeTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeTagAfterProcessing(str, Enum):
     FALSE = "false"
     TRUE = "true"
 
@@ -242,7 +234,7 @@ class InputSecurityLakeTypedDict(TypedDict):
 
 
 class InputSecurityLake(BaseModel):
-    type: Annotated[InputSecurityLakeType, PlainValidator(validate_open_enum(False))]
+    type: InputSecurityLakeType
 
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -283,10 +275,7 @@ class InputSecurityLake(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputSecurityLakeAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -302,10 +291,7 @@ class InputSecurityLake(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputSecurityLakeSignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -407,10 +393,7 @@ class InputSecurityLake(BaseModel):
     r"""Select or create a stored secret that references your access key and secret key"""
 
     tag_after_processing: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeTagAfterProcessing],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeTagAfterProcessing],
         pydantic.Field(alias="tagAfterProcessing"),
     ] = None
 

cribl_control_plane/models/inputsnmp.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSnmpType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpType(str, Enum):
     SNMP = "snmp"
 
 
@@ -26,14 +23,14 @@ class InputSnmpConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSnmpPqTypedDict(TypedDict):
 
 
 class InputSnmpPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpMode.ALWAYS
+    mode: Optional[InputSnmpMode] = InputSnmpMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputSnmpPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpCompression.NONE
+    compress: Optional[InputSnmpCompression] = InputSnmpCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class AuthenticationProtocol(str, Enum):
     NONE = "none"
     MD5 = "md5"
     SHA = "sha"
@@ -111,10 +104,7 @@ class V3User(BaseModel):
     name: str
 
     auth_protocol: Annotated[
-        Annotated[
-            Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="authProtocol"),
+        Optional[AuthenticationProtocol], pydantic.Field(alias="authProtocol")
    ] = AuthenticationProtocol.NONE
 
     auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
@@ -164,9 +154,9 @@ class InputSnmpMetadatum(BaseModel):
 
 
 class InputSnmpTypedDict(TypedDict):
-    type: InputSnmpType
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputSnmpType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -203,11 +193,11 @@ class InputSnmpTypedDict(TypedDict):
 
 
 class InputSnmp(BaseModel):
-    type: Annotated[InputSnmpType, PlainValidator(validate_open_enum(False))]
-
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Optional[InputSnmpType] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputsplunk.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputSplunkType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkType(str, Enum):
     SPLUNK = "splunk"
 
 
@@ -26,14 +23,14 @@ class InputSplunkConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkPqTypedDict(TypedDict):
 
 
 class InputSplunkPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkMode.ALWAYS
+    mode: Optional[InputSplunkMode] = InputSplunkMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputSplunkPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkPqCompression.NONE
+    compress: Optional[InputSplunkPqCompression] = InputSplunkPqCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -156,19 +149,11 @@ class InputSplunkTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -198,14 +183,14 @@ class InputSplunkAuthToken(BaseModel):
     description: Optional[str] = None
 
 
-class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaxS2SVersion(str, Enum):
     r"""The highest S2S protocol version to advertise during handshake"""
 
     V3 = "v3"
     V4 = "v4"
 
 
-class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkCompression(str, Enum):
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
 
     DISABLED = "disabled"
@@ -214,11 +199,11 @@ class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
 
 
 class InputSplunkTypedDict(TypedDict):
-    type: InputSplunkType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputSplunkType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -270,14 +255,14 @@
 
 
 class InputSplunk(BaseModel):
-    type: Annotated[InputSplunkType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Optional[InputSplunkType] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -356,11 +341,7 @@ class InputSplunk(BaseModel):
     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
 
     max_s2_sversion: Annotated[
-        Annotated[
-            Optional[InputSplunkMaxS2SVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxS2Sversion"),
+        Optional[InputSplunkMaxS2SVersion], pydantic.Field(alias="maxS2Sversion")
     ] = InputSplunkMaxS2SVersion.V3
     r"""The highest S2S protocol version to advertise during handshake"""
 
@@ -381,7 +362,5 @@ class InputSplunk(BaseModel):
     ] = False
     r"""Extract and process Splunk-generated metrics as Cribl metrics"""
 
-    compress: Annotated[
-        Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkCompression.DISABLED
+    compress: Optional[InputSplunkCompression] = InputSplunkCompression.DISABLED
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
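
Note: the other recurring change, visible in the InputSnmp and InputSplunk hunks above, is that the type discriminator drops from a required field to an optional one with a None default (NotRequired in the TypedDicts), while genuinely required settings such as InputSplunk.port stay mandatory. A small introspection sketch, assuming the generated models follow standard pydantic v2 semantics:

    # Sketch: check which InputSplunk fields are still required after the upgrade.
    from cribl_control_plane.models.inputsplunk import InputSplunk

    print(InputSplunk.model_fields["type"].is_required())  # False in 0.0.17 (required in 0.0.15)
    print(InputSplunk.model_fields["port"].is_required())  # True: port is still mandatory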