cribl-control-plane 0.0.47__py3-none-any.whl → 0.0.48a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane has been flagged as potentially problematic; consult the package registry's advisory page for details.

Files changed (162) hide show
  1. cribl_control_plane/_version.py +3 -5
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/models/__init__.py +12 -12
  4. cribl_control_plane/models/cacheconnection.py +10 -2
  5. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  6. cribl_control_plane/models/cloudprovider.py +2 -1
  7. cribl_control_plane/models/configgroup.py +7 -2
  8. cribl_control_plane/models/configgroupcloud.py +6 -2
  9. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  10. cribl_control_plane/models/cribllakedataset.py +8 -2
  11. cribl_control_plane/models/datasetmetadata.py +8 -2
  12. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  13. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  14. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  15. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  16. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  17. cribl_control_plane/models/getsummaryop.py +7 -2
  18. cribl_control_plane/models/hbcriblinfo.py +19 -3
  19. cribl_control_plane/models/healthstatus.py +7 -4
  20. cribl_control_plane/models/heartbeatmetadata.py +3 -0
  21. cribl_control_plane/models/inputappscope.py +34 -14
  22. cribl_control_plane/models/inputazureblob.py +17 -6
  23. cribl_control_plane/models/inputcollection.py +11 -4
  24. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  25. cribl_control_plane/models/inputcribl.py +11 -4
  26. cribl_control_plane/models/inputcriblhttp.py +23 -8
  27. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  28. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  29. cribl_control_plane/models/inputcribltcp.py +23 -8
  30. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  31. cribl_control_plane/models/inputdatadogagent.py +24 -8
  32. cribl_control_plane/models/inputdatagen.py +11 -4
  33. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  34. cribl_control_plane/models/inputelastic.py +40 -14
  35. cribl_control_plane/models/inputeventhub.py +15 -6
  36. cribl_control_plane/models/inputexec.py +14 -6
  37. cribl_control_plane/models/inputfile.py +15 -6
  38. cribl_control_plane/models/inputfirehose.py +23 -8
  39. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  40. cribl_control_plane/models/inputgrafana.py +67 -24
  41. cribl_control_plane/models/inputhttp.py +23 -8
  42. cribl_control_plane/models/inputhttpraw.py +23 -8
  43. cribl_control_plane/models/inputjournalfiles.py +12 -4
  44. cribl_control_plane/models/inputkafka.py +46 -16
  45. cribl_control_plane/models/inputkinesis.py +38 -14
  46. cribl_control_plane/models/inputkubeevents.py +11 -4
  47. cribl_control_plane/models/inputkubelogs.py +16 -8
  48. cribl_control_plane/models/inputkubemetrics.py +16 -8
  49. cribl_control_plane/models/inputloki.py +29 -10
  50. cribl_control_plane/models/inputmetrics.py +23 -8
  51. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  52. cribl_control_plane/models/inputmsk.py +53 -18
  53. cribl_control_plane/models/inputnetflow.py +11 -4
  54. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  55. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  56. cribl_control_plane/models/inputoffice365service.py +35 -16
  57. cribl_control_plane/models/inputopentelemetry.py +38 -16
  58. cribl_control_plane/models/inputprometheus.py +50 -18
  59. cribl_control_plane/models/inputprometheusrw.py +30 -10
  60. cribl_control_plane/models/inputrawudp.py +11 -4
  61. cribl_control_plane/models/inputs3.py +21 -8
  62. cribl_control_plane/models/inputs3inventory.py +26 -10
  63. cribl_control_plane/models/inputsecuritylake.py +27 -10
  64. cribl_control_plane/models/inputsnmp.py +16 -6
  65. cribl_control_plane/models/inputsplunk.py +33 -12
  66. cribl_control_plane/models/inputsplunkhec.py +29 -10
  67. cribl_control_plane/models/inputsplunksearch.py +33 -14
  68. cribl_control_plane/models/inputsqs.py +27 -10
  69. cribl_control_plane/models/inputsyslog.py +43 -16
  70. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  71. cribl_control_plane/models/inputsystemstate.py +16 -8
  72. cribl_control_plane/models/inputtcp.py +29 -10
  73. cribl_control_plane/models/inputtcpjson.py +29 -10
  74. cribl_control_plane/models/inputwef.py +37 -14
  75. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  76. cribl_control_plane/models/inputwineventlogs.py +20 -10
  77. cribl_control_plane/models/inputwiz.py +21 -8
  78. cribl_control_plane/models/inputwizwebhook.py +23 -8
  79. cribl_control_plane/models/inputzscalerhec.py +29 -10
  80. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  81. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  82. cribl_control_plane/models/masterworkerentry.py +7 -2
  83. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  84. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  85. cribl_control_plane/models/nodeprovidedinfo.py +3 -0
  86. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  87. cribl_control_plane/models/nodeupgradestate.py +2 -1
  88. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  89. cribl_control_plane/models/outputazureblob.py +48 -18
  90. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  91. cribl_control_plane/models/outputazureeventhub.py +40 -18
  92. cribl_control_plane/models/outputazurelogs.py +35 -12
  93. cribl_control_plane/models/outputclickhouse.py +55 -20
  94. cribl_control_plane/models/outputcloudwatch.py +29 -10
  95. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  96. cribl_control_plane/models/outputcriblhttp.py +44 -16
  97. cribl_control_plane/models/outputcribllake.py +46 -16
  98. cribl_control_plane/models/outputcribltcp.py +45 -18
  99. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  100. cribl_control_plane/models/outputdatadog.py +48 -20
  101. cribl_control_plane/models/outputdataset.py +46 -18
  102. cribl_control_plane/models/outputdiskspool.py +7 -2
  103. cribl_control_plane/models/outputdls3.py +68 -24
  104. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  105. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  106. cribl_control_plane/models/outputelastic.py +43 -18
  107. cribl_control_plane/models/outputelasticcloud.py +36 -12
  108. cribl_control_plane/models/outputexabeam.py +29 -10
  109. cribl_control_plane/models/outputfilesystem.py +39 -14
  110. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  111. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  112. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  113. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  114. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  115. cribl_control_plane/models/outputgraphite.py +31 -14
  116. cribl_control_plane/models/outputhoneycomb.py +35 -12
  117. cribl_control_plane/models/outputhumiohec.py +43 -16
  118. cribl_control_plane/models/outputinfluxdb.py +42 -16
  119. cribl_control_plane/models/outputkafka.py +74 -28
  120. cribl_control_plane/models/outputkinesis.py +40 -16
  121. cribl_control_plane/models/outputloki.py +41 -16
  122. cribl_control_plane/models/outputminio.py +65 -24
  123. cribl_control_plane/models/outputmsk.py +82 -30
  124. cribl_control_plane/models/outputnewrelic.py +43 -18
  125. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  126. cribl_control_plane/models/outputopentelemetry.py +67 -26
  127. cribl_control_plane/models/outputprometheus.py +35 -12
  128. cribl_control_plane/models/outputring.py +19 -8
  129. cribl_control_plane/models/outputs3.py +68 -26
  130. cribl_control_plane/models/outputsecuritylake.py +52 -18
  131. cribl_control_plane/models/outputsentinel.py +45 -18
  132. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  133. cribl_control_plane/models/outputservicenow.py +60 -24
  134. cribl_control_plane/models/outputsignalfx.py +37 -14
  135. cribl_control_plane/models/outputsns.py +36 -14
  136. cribl_control_plane/models/outputsplunk.py +60 -24
  137. cribl_control_plane/models/outputsplunkhec.py +35 -12
  138. cribl_control_plane/models/outputsplunklb.py +77 -30
  139. cribl_control_plane/models/outputsqs.py +41 -16
  140. cribl_control_plane/models/outputstatsd.py +30 -14
  141. cribl_control_plane/models/outputstatsdext.py +29 -12
  142. cribl_control_plane/models/outputsumologic.py +35 -12
  143. cribl_control_plane/models/outputsyslog.py +58 -24
  144. cribl_control_plane/models/outputtcpjson.py +52 -20
  145. cribl_control_plane/models/outputwavefront.py +35 -12
  146. cribl_control_plane/models/outputwebhook.py +58 -22
  147. cribl_control_plane/models/outputxsiam.py +35 -14
  148. cribl_control_plane/models/productscore.py +2 -1
  149. cribl_control_plane/models/rbacresource.py +2 -1
  150. cribl_control_plane/models/resourcepolicy.py +4 -2
  151. cribl_control_plane/models/routeconf.py +3 -4
  152. cribl_control_plane/models/runnablejobcollection.py +30 -13
  153. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  154. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  155. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  156. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  157. cribl_control_plane/models/workertypes.py +2 -1
  158. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/METADATA +1 -1
  159. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/RECORD +160 -162
  160. {cribl_control_plane-0.0.47.dist-info → cribl_control_plane-0.0.48a1.dist-info}/WHEEL +1 -1
  161. cribl_control_plane/models/appmode.py +0 -13
  162. cribl_control_plane/models/routecloneconf.py +0 -13
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputS3InventoryConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputS3InventoryMode(str, Enum):
29
+ class InputS3InventoryMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputS3InventoryCompression(str, Enum):
36
+ class InputS3InventoryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputS3InventoryPqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputS3InventoryPq(BaseModel):
67
- mode: Optional[InputS3InventoryMode] = InputS3InventoryMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputS3InventoryMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputS3InventoryMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputS3InventoryPq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputS3InventoryCompression] = InputS3InventoryCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputS3InventoryCompression], PlainValidator(validate_open_enum(False))
98
+ ] = InputS3InventoryCompression.NONE
92
99
  r"""Codec to use to compress the persisted data"""
93
100
 
94
101
  pq_controls: Annotated[
@@ -96,7 +103,7 @@ class InputS3InventoryPq(BaseModel):
96
103
  ] = None
97
104
 
98
105
 
99
- class InputS3InventoryAuthenticationMethod(str, Enum):
106
+ class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
100
107
  r"""AWS authentication method. Choose Auto to use IAM roles."""
101
108
 
102
109
  AUTO = "auto"
@@ -104,7 +111,7 @@ class InputS3InventoryAuthenticationMethod(str, Enum):
104
111
  SECRET = "secret"
105
112
 
106
113
 
107
- class InputS3InventorySignatureVersion(str, Enum):
114
+ class InputS3InventorySignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
108
115
  r"""Signature version to use for signing S3 requests"""
109
116
 
110
117
  V2 = "v2"
@@ -157,7 +164,7 @@ class InputS3InventoryCheckpointing(BaseModel):
157
164
  r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""
158
165
 
159
166
 
160
- class InputS3InventoryTagAfterProcessing(str, Enum):
167
+ class InputS3InventoryTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
161
168
  FALSE = "false"
162
169
  TRUE = "true"
163
170
 
@@ -294,7 +301,10 @@ class InputS3Inventory(BaseModel):
294
301
  r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
295
302
 
296
303
  aws_authentication_method: Annotated[
297
- Optional[InputS3InventoryAuthenticationMethod],
304
+ Annotated[
305
+ Optional[InputS3InventoryAuthenticationMethod],
306
+ PlainValidator(validate_open_enum(False)),
307
+ ],
298
308
  pydantic.Field(alias="awsAuthenticationMethod"),
299
309
  ] = InputS3InventoryAuthenticationMethod.AUTO
300
310
  r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -310,7 +320,10 @@ class InputS3Inventory(BaseModel):
310
320
  r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
311
321
 
312
322
  signature_version: Annotated[
313
- Optional[InputS3InventorySignatureVersion],
323
+ Annotated[
324
+ Optional[InputS3InventorySignatureVersion],
325
+ PlainValidator(validate_open_enum(False)),
326
+ ],
314
327
  pydantic.Field(alias="signatureVersion"),
315
328
  ] = InputS3InventorySignatureVersion.V4
316
329
  r"""Signature version to use for signing S3 requests"""
@@ -429,7 +442,10 @@ class InputS3Inventory(BaseModel):
429
442
  r"""Select or create a stored secret that references your access key and secret key"""
430
443
 
431
444
  tag_after_processing: Annotated[
432
- Optional[InputS3InventoryTagAfterProcessing],
445
+ Annotated[
446
+ Optional[InputS3InventoryTagAfterProcessing],
447
+ PlainValidator(validate_open_enum(False)),
448
+ ],
433
449
  pydantic.Field(alias="tagAfterProcessing"),
434
450
  ] = None
435
451
 
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputSecurityLakeConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputSecurityLakeMode(str, Enum):
29
+ class InputSecurityLakeMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputSecurityLakeCompression(str, Enum):
36
+ class InputSecurityLakeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputSecurityLakePqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputSecurityLakePq(BaseModel):
67
- mode: Optional[InputSecurityLakeMode] = InputSecurityLakeMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputSecurityLakeMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputSecurityLakeMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,10 @@ class InputSecurityLakePq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputSecurityLakeCompression] = InputSecurityLakeCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputSecurityLakeCompression],
98
+ PlainValidator(validate_open_enum(False)),
99
+ ] = InputSecurityLakeCompression.NONE
92
100
  r"""Codec to use to compress the persisted data"""
93
101
 
94
102
  pq_controls: Annotated[
@@ -96,7 +104,7 @@ class InputSecurityLakePq(BaseModel):
96
104
  ] = None
97
105
 
98
106
 
99
- class InputSecurityLakeAuthenticationMethod(str, Enum):
107
+ class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
100
108
  r"""AWS authentication method. Choose Auto to use IAM roles."""
101
109
 
102
110
  AUTO = "auto"
@@ -104,7 +112,7 @@ class InputSecurityLakeAuthenticationMethod(str, Enum):
104
112
  SECRET = "secret"
105
113
 
106
114
 
107
- class InputSecurityLakeSignatureVersion(str, Enum):
115
+ class InputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
108
116
  r"""Signature version to use for signing S3 requests"""
109
117
 
110
118
  V2 = "v2"
@@ -157,7 +165,7 @@ class InputSecurityLakeCheckpointing(BaseModel):
157
165
  r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""
158
166
 
159
167
 
160
- class InputSecurityLakeTagAfterProcessing(str, Enum):
168
+ class InputSecurityLakeTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
161
169
  FALSE = "false"
162
170
  TRUE = "true"
163
171
 
@@ -290,7 +298,10 @@ class InputSecurityLake(BaseModel):
290
298
  r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
291
299
 
292
300
  aws_authentication_method: Annotated[
293
- Optional[InputSecurityLakeAuthenticationMethod],
301
+ Annotated[
302
+ Optional[InputSecurityLakeAuthenticationMethod],
303
+ PlainValidator(validate_open_enum(False)),
304
+ ],
294
305
  pydantic.Field(alias="awsAuthenticationMethod"),
295
306
  ] = InputSecurityLakeAuthenticationMethod.AUTO
296
307
  r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -306,7 +317,10 @@ class InputSecurityLake(BaseModel):
306
317
  r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
307
318
 
308
319
  signature_version: Annotated[
309
- Optional[InputSecurityLakeSignatureVersion],
320
+ Annotated[
321
+ Optional[InputSecurityLakeSignatureVersion],
322
+ PlainValidator(validate_open_enum(False)),
323
+ ],
310
324
  pydantic.Field(alias="signatureVersion"),
311
325
  ] = InputSecurityLakeSignatureVersion.V4
312
326
  r"""Signature version to use for signing S3 requests"""
@@ -413,7 +427,10 @@ class InputSecurityLake(BaseModel):
413
427
  r"""Select or create a stored secret that references your access key and secret key"""
414
428
 
415
429
  tag_after_processing: Annotated[
416
- Optional[InputSecurityLakeTagAfterProcessing],
430
+ Annotated[
431
+ Optional[InputSecurityLakeTagAfterProcessing],
432
+ PlainValidator(validate_open_enum(False)),
433
+ ],
417
434
  pydantic.Field(alias="tagAfterProcessing"),
418
435
  ] = None
419
436
 
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import Any, List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputSnmpConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputSnmpMode(str, Enum):
29
+ class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputSnmpCompression(str, Enum):
36
+ class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputSnmpPqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputSnmpPq(BaseModel):
67
- mode: Optional[InputSnmpMode] = InputSnmpMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputSnmpMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputSnmpPq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputSnmpCompression] = InputSnmpCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
98
+ ] = InputSnmpCompression.NONE
92
99
  r"""Codec to use to compress the persisted data"""
93
100
 
94
101
  pq_controls: Annotated[
@@ -96,7 +103,7 @@ class InputSnmpPq(BaseModel):
96
103
  ] = None
97
104
 
98
105
 
99
- class AuthenticationProtocol(str, Enum):
106
+ class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
100
107
  NONE = "none"
101
108
  MD5 = "md5"
102
109
  SHA = "sha"
@@ -117,7 +124,10 @@ class V3User(BaseModel):
117
124
  name: str
118
125
 
119
126
  auth_protocol: Annotated[
120
- Optional[AuthenticationProtocol], pydantic.Field(alias="authProtocol")
127
+ Annotated[
128
+ Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
129
+ ],
130
+ pydantic.Field(alias="authProtocol"),
121
131
  ] = AuthenticationProtocol.NONE
122
132
 
123
133
  auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import Any, List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputSplunkConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputSplunkMode(str, Enum):
29
+ class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputSplunkPqCompression(str, Enum):
36
+ class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputSplunkPqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputSplunkPq(BaseModel):
67
- mode: Optional[InputSplunkMode] = InputSplunkMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputSplunkMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputSplunkPq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputSplunkPqCompression] = InputSplunkPqCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
98
+ ] = InputSplunkPqCompression.NONE
92
99
  r"""Codec to use to compress the persisted data"""
93
100
 
94
101
  pq_controls: Annotated[
@@ -96,14 +103,14 @@ class InputSplunkPq(BaseModel):
96
103
  ] = None
97
104
 
98
105
 
99
- class InputSplunkMinimumTLSVersion(str, Enum):
106
+ class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
100
107
  TL_SV1 = "TLSv1"
101
108
  TL_SV1_1 = "TLSv1.1"
102
109
  TL_SV1_2 = "TLSv1.2"
103
110
  TL_SV1_3 = "TLSv1.3"
104
111
 
105
112
 
106
- class InputSplunkMaximumTLSVersion(str, Enum):
113
+ class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
107
114
  TL_SV1 = "TLSv1"
108
115
  TL_SV1_1 = "TLSv1.1"
109
116
  TL_SV1_2 = "TLSv1.2"
@@ -162,11 +169,19 @@ class InputSplunkTLSSettingsServerSide(BaseModel):
162
169
  ] = None
163
170
 
164
171
  min_version: Annotated[
165
- Optional[InputSplunkMinimumTLSVersion], pydantic.Field(alias="minVersion")
172
+ Annotated[
173
+ Optional[InputSplunkMinimumTLSVersion],
174
+ PlainValidator(validate_open_enum(False)),
175
+ ],
176
+ pydantic.Field(alias="minVersion"),
166
177
  ] = None
167
178
 
168
179
  max_version: Annotated[
169
- Optional[InputSplunkMaximumTLSVersion], pydantic.Field(alias="maxVersion")
180
+ Annotated[
181
+ Optional[InputSplunkMaximumTLSVersion],
182
+ PlainValidator(validate_open_enum(False)),
183
+ ],
184
+ pydantic.Field(alias="maxVersion"),
170
185
  ] = None
171
186
 
172
187
 
@@ -196,14 +211,14 @@ class InputSplunkAuthToken(BaseModel):
196
211
  description: Optional[str] = None
197
212
 
198
213
 
199
- class InputSplunkMaxS2SVersion(str, Enum):
214
+ class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
200
215
  r"""The highest S2S protocol version to advertise during handshake"""
201
216
 
202
217
  V3 = "v3"
203
218
  V4 = "v4"
204
219
 
205
220
 
206
- class InputSplunkCompression(str, Enum):
221
+ class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
207
222
  r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
208
223
 
209
224
  DISABLED = "disabled"
@@ -354,7 +369,11 @@ class InputSplunk(BaseModel):
354
369
  r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""
355
370
 
356
371
  max_s2_sversion: Annotated[
357
- Optional[InputSplunkMaxS2SVersion], pydantic.Field(alias="maxS2Sversion")
372
+ Annotated[
373
+ Optional[InputSplunkMaxS2SVersion],
374
+ PlainValidator(validate_open_enum(False)),
375
+ ],
376
+ pydantic.Field(alias="maxS2Sversion"),
358
377
  ] = InputSplunkMaxS2SVersion.V3
359
378
  r"""The highest S2S protocol version to advertise during handshake"""
360
379
 
@@ -375,5 +394,7 @@ class InputSplunk(BaseModel):
375
394
  ] = False
376
395
  r"""Extract and process Splunk-generated metrics as Cribl metrics"""
377
396
 
378
- compress: Optional[InputSplunkCompression] = InputSplunkCompression.DISABLED
397
+ compress: Annotated[
398
+ Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
399
+ ] = InputSplunkCompression.DISABLED
379
400
  r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import Any, List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputSplunkHecConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputSplunkHecMode(str, Enum):
29
+ class InputSplunkHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputSplunkHecCompression(str, Enum):
36
+ class InputSplunkHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputSplunkHecPqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputSplunkHecPq(BaseModel):
67
- mode: Optional[InputSplunkHecMode] = InputSplunkHecMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputSplunkHecMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputSplunkHecMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,9 @@ class InputSplunkHecPq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputSplunkHecCompression] = InputSplunkHecCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputSplunkHecCompression], PlainValidator(validate_open_enum(False))
98
+ ] = InputSplunkHecCompression.NONE
92
99
  r"""Codec to use to compress the persisted data"""
93
100
 
94
101
  pq_controls: Annotated[
@@ -96,7 +103,7 @@ class InputSplunkHecPq(BaseModel):
96
103
  ] = None
97
104
 
98
105
 
99
- class InputSplunkHecAuthenticationMethod(str, Enum):
106
+ class InputSplunkHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
100
107
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
101
108
 
102
109
  MANUAL = "manual"
@@ -134,7 +141,11 @@ class InputSplunkHecAuthToken(BaseModel):
134
141
  token: Any
135
142
 
136
143
  auth_type: Annotated[
137
- Optional[InputSplunkHecAuthenticationMethod], pydantic.Field(alias="authType")
144
+ Annotated[
145
+ Optional[InputSplunkHecAuthenticationMethod],
146
+ PlainValidator(validate_open_enum(False)),
147
+ ],
148
+ pydantic.Field(alias="authType"),
138
149
  ] = InputSplunkHecAuthenticationMethod.MANUAL
139
150
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
140
151
 
@@ -154,14 +165,14 @@ class InputSplunkHecAuthToken(BaseModel):
154
165
  r"""Fields to add to events referencing this token"""
155
166
 
156
167
 
157
- class InputSplunkHecMinimumTLSVersion(str, Enum):
168
+ class InputSplunkHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
158
169
  TL_SV1 = "TLSv1"
159
170
  TL_SV1_1 = "TLSv1.1"
160
171
  TL_SV1_2 = "TLSv1.2"
161
172
  TL_SV1_3 = "TLSv1.3"
162
173
 
163
174
 
164
- class InputSplunkHecMaximumTLSVersion(str, Enum):
175
+ class InputSplunkHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
165
176
  TL_SV1 = "TLSv1"
166
177
  TL_SV1_1 = "TLSv1.1"
167
178
  TL_SV1_2 = "TLSv1.2"
@@ -220,11 +231,19 @@ class InputSplunkHecTLSSettingsServerSide(BaseModel):
220
231
  ] = None
221
232
 
222
233
  min_version: Annotated[
223
- Optional[InputSplunkHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
234
+ Annotated[
235
+ Optional[InputSplunkHecMinimumTLSVersion],
236
+ PlainValidator(validate_open_enum(False)),
237
+ ],
238
+ pydantic.Field(alias="minVersion"),
224
239
  ] = None
225
240
 
226
241
  max_version: Annotated[
227
- Optional[InputSplunkHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
242
+ Annotated[
243
+ Optional[InputSplunkHecMaximumTLSVersion],
244
+ PlainValidator(validate_open_enum(False)),
245
+ ],
246
+ pydantic.Field(alias="maxVersion"),
228
247
  ] = None
229
248
 
230
249
 
@@ -1,9 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
+ from cribl_control_plane import utils
4
5
  from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import validate_open_enum
5
7
  from enum import Enum
6
8
  import pydantic
9
+ from pydantic.functional_validators import PlainValidator
7
10
  from typing import List, Optional
8
11
  from typing_extensions import Annotated, NotRequired, TypedDict
9
12
 
@@ -23,14 +26,14 @@ class InputSplunkSearchConnection(BaseModel):
23
26
  pipeline: Optional[str] = None
24
27
 
25
28
 
26
- class InputSplunkSearchMode(str, Enum):
29
+ class InputSplunkSearchMode(str, Enum, metaclass=utils.OpenEnumMeta):
27
30
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
28
31
 
29
32
  SMART = "smart"
30
33
  ALWAYS = "always"
31
34
 
32
35
 
33
- class InputSplunkSearchCompression(str, Enum):
36
+ class InputSplunkSearchCompression(str, Enum, metaclass=utils.OpenEnumMeta):
34
37
  r"""Codec to use to compress the persisted data"""
35
38
 
36
39
  NONE = "none"
@@ -64,7 +67,9 @@ class InputSplunkSearchPqTypedDict(TypedDict):
64
67
 
65
68
 
66
69
  class InputSplunkSearchPq(BaseModel):
67
- mode: Optional[InputSplunkSearchMode] = InputSplunkSearchMode.ALWAYS
70
+ mode: Annotated[
71
+ Optional[InputSplunkSearchMode], PlainValidator(validate_open_enum(False))
72
+ ] = InputSplunkSearchMode.ALWAYS
68
73
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
69
74
 
70
75
  max_buffer_size: Annotated[
@@ -88,7 +93,10 @@ class InputSplunkSearchPq(BaseModel):
88
93
  path: Optional[str] = "$CRIBL_HOME/state/queues"
89
94
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
90
95
 
91
- compress: Optional[InputSplunkSearchCompression] = InputSplunkSearchCompression.NONE
96
+ compress: Annotated[
97
+ Optional[InputSplunkSearchCompression],
98
+ PlainValidator(validate_open_enum(False)),
99
+ ] = InputSplunkSearchCompression.NONE
92
100
  r"""Codec to use to compress the persisted data"""
93
101
 
94
102
  pq_controls: Annotated[
@@ -96,7 +104,7 @@ class InputSplunkSearchPq(BaseModel):
96
104
  ] = None
97
105
 
98
106
 
99
- class OutputMode(str, Enum):
107
+ class OutputMode(str, Enum, metaclass=utils.OpenEnumMeta):
100
108
  r"""Format of the returned output"""
101
109
 
102
110
  CSV = "csv"
@@ -129,7 +137,7 @@ class EndpointHeader(BaseModel):
129
137
  r"""JavaScript expression to compute the header's value, normally enclosed in backticks (e.g., `${earliest}`). If a constant, use single quotes (e.g., 'earliest'). Values without delimiters (e.g., earliest) are evaluated as strings."""
130
138
 
131
139
 
132
- class InputSplunkSearchLogLevel(str, Enum):
140
+ class InputSplunkSearchLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
133
141
  r"""Collector runtime log level (verbosity)"""
134
142
 
135
143
  ERROR = "error"
@@ -151,7 +159,7 @@ class InputSplunkSearchMetadatum(BaseModel):
151
159
  r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
152
160
 
153
161
 
154
- class InputSplunkSearchRetryType(str, Enum):
162
+ class InputSplunkSearchRetryType(str, Enum, metaclass=utils.OpenEnumMeta):
155
163
  r"""The algorithm to use when performing HTTP retries"""
156
164
 
157
165
  NONE = "none"
@@ -179,7 +187,9 @@ class InputSplunkSearchRetryRulesTypedDict(TypedDict):
179
187
 
180
188
 
181
189
  class InputSplunkSearchRetryRules(BaseModel):
182
- type: Optional[InputSplunkSearchRetryType] = InputSplunkSearchRetryType.BACKOFF
190
+ type: Annotated[
191
+ Optional[InputSplunkSearchRetryType], PlainValidator(validate_open_enum(False))
192
+ ] = InputSplunkSearchRetryType.BACKOFF
183
193
  r"""The algorithm to use when performing HTTP retries"""
184
194
 
185
195
  interval: Optional[float] = 1000
@@ -210,7 +220,7 @@ class InputSplunkSearchRetryRules(BaseModel):
210
220
  r"""Retry request when a connection reset (ECONNRESET) error occurs"""
211
221
 
212
222
 
213
- class InputSplunkSearchAuthenticationType(str, Enum):
223
+ class InputSplunkSearchAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
214
224
  r"""Splunk Search authentication type"""
215
225
 
216
226
  NONE = "none"
@@ -395,9 +405,10 @@ class InputSplunkSearch(BaseModel):
395
405
  endpoint: Optional[str] = "/services/search/v2/jobs/export"
396
406
  r"""REST API used to create a search"""
397
407
 
398
- output_mode: Annotated[Optional[OutputMode], pydantic.Field(alias="outputMode")] = (
399
- OutputMode.JSON
400
- )
408
+ output_mode: Annotated[
409
+ Annotated[Optional[OutputMode], PlainValidator(validate_open_enum(False))],
410
+ pydantic.Field(alias="outputMode"),
411
+ ] = OutputMode.JSON
401
412
  r"""Format of the returned output"""
402
413
 
403
414
  endpoint_params: Annotated[
@@ -411,7 +422,11 @@ class InputSplunkSearch(BaseModel):
411
422
  r"""Optional request headers to send to the endpoint"""
412
423
 
413
424
  log_level: Annotated[
414
- Optional[InputSplunkSearchLogLevel], pydantic.Field(alias="logLevel")
425
+ Annotated[
426
+ Optional[InputSplunkSearchLogLevel],
427
+ PlainValidator(validate_open_enum(False)),
428
+ ],
429
+ pydantic.Field(alias="logLevel"),
415
430
  ] = None
416
431
  r"""Collector runtime log level (verbosity)"""
417
432
 
@@ -472,7 +487,11 @@ class InputSplunkSearch(BaseModel):
472
487
  r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
473
488
 
474
489
  auth_type: Annotated[
475
- Optional[InputSplunkSearchAuthenticationType], pydantic.Field(alias="authType")
490
+ Annotated[
491
+ Optional[InputSplunkSearchAuthenticationType],
492
+ PlainValidator(validate_open_enum(False)),
493
+ ],
494
+ pydantic.Field(alias="authType"),
476
495
  ] = InputSplunkSearchAuthenticationType.BASIC
477
496
  r"""Splunk Search authentication type"""
478
497