cribl-control-plane 0.0.16__py3-none-any.whl → 0.0.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (156)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +4365 -4124
  4. cribl_control_plane/models/createinputop.py +1734 -2771
  5. cribl_control_plane/models/createoutputop.py +2153 -4314
  6. cribl_control_plane/models/createversioncommitop.py +24 -0
  7. cribl_control_plane/models/createversionpushop.py +23 -0
  8. cribl_control_plane/models/createversionrevertop.py +47 -0
  9. cribl_control_plane/models/createversionsyncop.py +23 -0
  10. cribl_control_plane/models/createversionundoop.py +37 -0
  11. cribl_control_plane/models/getversionbranchop.py +23 -0
  12. cribl_control_plane/models/getversioncountop.py +47 -0
  13. cribl_control_plane/models/getversioncurrentbranchop.py +23 -0
  14. cribl_control_plane/models/getversiondiffop.py +63 -0
  15. cribl_control_plane/models/getversionfilesop.py +48 -0
  16. cribl_control_plane/models/getversioninfoop.py +24 -0
  17. cribl_control_plane/models/getversionshowop.py +63 -0
  18. cribl_control_plane/models/getversionstatusop.py +38 -0
  19. cribl_control_plane/models/gitcommitparams.py +23 -0
  20. cribl_control_plane/models/gitcommitsummary.py +68 -0
  21. cribl_control_plane/models/gitfile.py +20 -0
  22. cribl_control_plane/models/gitfilesresponse.py +22 -0
  23. cribl_control_plane/models/gitinfo.py +23 -0
  24. cribl_control_plane/models/gitrevertparams.py +20 -0
  25. cribl_control_plane/models/gitrevertresult.py +48 -0
  26. cribl_control_plane/models/gitstatusresult.py +73 -0
  27. cribl_control_plane/models/healthstatus.py +4 -7
  28. cribl_control_plane/models/inputappscope.py +16 -36
  29. cribl_control_plane/models/inputazureblob.py +8 -19
  30. cribl_control_plane/models/inputcollection.py +6 -15
  31. cribl_control_plane/models/inputconfluentcloud.py +20 -45
  32. cribl_control_plane/models/inputcribl.py +6 -13
  33. cribl_control_plane/models/inputcriblhttp.py +10 -27
  34. cribl_control_plane/models/inputcribllakehttp.py +12 -26
  35. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  36. cribl_control_plane/models/inputcribltcp.py +10 -27
  37. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  38. cribl_control_plane/models/inputdatadogagent.py +10 -28
  39. cribl_control_plane/models/inputdatagen.py +6 -13
  40. cribl_control_plane/models/inputedgeprometheus.py +31 -64
  41. cribl_control_plane/models/inputelastic.py +16 -44
  42. cribl_control_plane/models/inputeventhub.py +8 -19
  43. cribl_control_plane/models/inputexec.py +8 -16
  44. cribl_control_plane/models/inputfile.py +8 -17
  45. cribl_control_plane/models/inputfirehose.py +10 -27
  46. cribl_control_plane/models/inputgooglepubsub.py +8 -23
  47. cribl_control_plane/models/inputgrafana_union.py +35 -81
  48. cribl_control_plane/models/inputhttp.py +10 -27
  49. cribl_control_plane/models/inputhttpraw.py +10 -27
  50. cribl_control_plane/models/inputjournalfiles.py +6 -16
  51. cribl_control_plane/models/inputkafka.py +16 -45
  52. cribl_control_plane/models/inputkinesis.py +16 -42
  53. cribl_control_plane/models/inputkubeevents.py +6 -13
  54. cribl_control_plane/models/inputkubelogs.py +10 -18
  55. cribl_control_plane/models/inputkubemetrics.py +10 -18
  56. cribl_control_plane/models/inputloki.py +12 -33
  57. cribl_control_plane/models/inputmetrics.py +10 -25
  58. cribl_control_plane/models/inputmodeldriventelemetry.py +12 -32
  59. cribl_control_plane/models/inputmsk.py +18 -52
  60. cribl_control_plane/models/inputnetflow.py +6 -15
  61. cribl_control_plane/models/inputoffice365mgmt.py +16 -37
  62. cribl_control_plane/models/inputoffice365msgtrace.py +18 -39
  63. cribl_control_plane/models/inputoffice365service.py +18 -39
  64. cribl_control_plane/models/inputopentelemetry.py +18 -42
  65. cribl_control_plane/models/inputprometheus.py +20 -54
  66. cribl_control_plane/models/inputprometheusrw.py +12 -34
  67. cribl_control_plane/models/inputrawudp.py +6 -15
  68. cribl_control_plane/models/inputs3.py +10 -23
  69. cribl_control_plane/models/inputs3inventory.py +12 -28
  70. cribl_control_plane/models/inputsecuritylake.py +12 -29
  71. cribl_control_plane/models/inputsnmp.py +8 -20
  72. cribl_control_plane/models/inputsplunk.py +14 -37
  73. cribl_control_plane/models/inputsplunkhec.py +12 -33
  74. cribl_control_plane/models/inputsplunksearch.py +16 -37
  75. cribl_control_plane/models/inputsqs.py +12 -31
  76. cribl_control_plane/models/inputsyslog_union.py +29 -53
  77. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  78. cribl_control_plane/models/inputsystemstate.py +10 -18
  79. cribl_control_plane/models/inputtcp.py +12 -33
  80. cribl_control_plane/models/inputtcpjson.py +12 -33
  81. cribl_control_plane/models/inputwef.py +20 -45
  82. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  83. cribl_control_plane/models/inputwineventlogs.py +12 -22
  84. cribl_control_plane/models/inputwiz.py +10 -25
  85. cribl_control_plane/models/inputzscalerhec.py +12 -33
  86. cribl_control_plane/models/output.py +3 -6
  87. cribl_control_plane/models/outputazureblob.py +20 -52
  88. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  89. cribl_control_plane/models/outputazureeventhub.py +20 -44
  90. cribl_control_plane/models/outputazurelogs.py +14 -37
  91. cribl_control_plane/models/outputclickhouse.py +22 -59
  92. cribl_control_plane/models/outputcloudwatch.py +12 -33
  93. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  94. cribl_control_plane/models/outputcriblhttp.py +18 -46
  95. cribl_control_plane/models/outputcribllake.py +18 -48
  96. cribl_control_plane/models/outputcribltcp.py +20 -47
  97. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  98. cribl_control_plane/models/outputdatadog.py +22 -50
  99. cribl_control_plane/models/outputdataset.py +20 -48
  100. cribl_control_plane/models/outputdefault.py +2 -5
  101. cribl_control_plane/models/outputdevnull.py +2 -5
  102. cribl_control_plane/models/outputdiskspool.py +4 -9
  103. cribl_control_plane/models/outputdls3.py +26 -72
  104. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  105. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  106. cribl_control_plane/models/outputelastic.py +20 -45
  107. cribl_control_plane/models/outputelasticcloud.py +14 -40
  108. cribl_control_plane/models/outputexabeam.py +12 -33
  109. cribl_control_plane/models/outputfilesystem.py +16 -41
  110. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  111. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  112. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  113. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  114. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  115. cribl_control_plane/models/outputgraphite.py +16 -35
  116. cribl_control_plane/models/outputhoneycomb.py +14 -37
  117. cribl_control_plane/models/outputhumiohec.py +18 -47
  118. cribl_control_plane/models/outputinfluxdb.py +18 -44
  119. cribl_control_plane/models/outputkafka.py +28 -73
  120. cribl_control_plane/models/outputkinesis.py +18 -44
  121. cribl_control_plane/models/outputloki.py +18 -43
  122. cribl_control_plane/models/outputminio.py +26 -69
  123. cribl_control_plane/models/outputmsk.py +30 -81
  124. cribl_control_plane/models/outputnetflow.py +2 -5
  125. cribl_control_plane/models/outputnewrelic.py +20 -45
  126. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  127. cribl_control_plane/models/outputopentelemetry.py +28 -69
  128. cribl_control_plane/models/outputprometheus.py +14 -37
  129. cribl_control_plane/models/outputring.py +10 -21
  130. cribl_control_plane/models/outputrouter.py +2 -5
  131. cribl_control_plane/models/outputs3.py +28 -72
  132. cribl_control_plane/models/outputsecuritylake.py +20 -56
  133. cribl_control_plane/models/outputsentinel.py +20 -49
  134. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  135. cribl_control_plane/models/outputservicenow.py +26 -64
  136. cribl_control_plane/models/outputsignalfx.py +16 -39
  137. cribl_control_plane/models/outputsnmp.py +2 -5
  138. cribl_control_plane/models/outputsns.py +16 -40
  139. cribl_control_plane/models/outputsplunk.py +26 -64
  140. cribl_control_plane/models/outputsplunkhec.py +14 -37
  141. cribl_control_plane/models/outputsplunklb.py +36 -83
  142. cribl_control_plane/models/outputsqs.py +18 -45
  143. cribl_control_plane/models/outputstatsd.py +16 -34
  144. cribl_control_plane/models/outputstatsdext.py +14 -33
  145. cribl_control_plane/models/outputsumologic.py +14 -37
  146. cribl_control_plane/models/outputsyslog.py +26 -60
  147. cribl_control_plane/models/outputtcpjson.py +22 -54
  148. cribl_control_plane/models/outputwavefront.py +14 -37
  149. cribl_control_plane/models/outputwebhook.py +24 -60
  150. cribl_control_plane/models/outputxsiam.py +16 -37
  151. cribl_control_plane/sdk.py +4 -0
  152. cribl_control_plane/versioning.py +2309 -0
  153. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/METADATA +18 -2
  154. cribl_control_plane-0.0.18.dist-info/RECORD +237 -0
  155. cribl_control_plane-0.0.16.dist-info/RECORD +0 -215
  156. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputrawudp.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputRawUDPType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputRawUDPType(str, Enum):
     RAW_UDP = "raw_udp"


@@ -26,14 +23,14 @@ class InputRawUDPConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputRawUDPMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputRawUDPMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputRawUDPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputRawUDPCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputRawUDPPqTypedDict(TypedDict):


 class InputRawUDPPq(BaseModel):
-    mode: Annotated[
-        Optional[InputRawUDPMode], PlainValidator(validate_open_enum(False))
-    ] = InputRawUDPMode.ALWAYS
+    mode: Optional[InputRawUDPMode] = InputRawUDPMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputRawUDPPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputRawUDPCompression], PlainValidator(validate_open_enum(False))
-    ] = InputRawUDPCompression.NONE
+    compress: Optional[InputRawUDPCompression] = InputRawUDPCompression.NONE
     r"""Codec to use to compress the persisted data"""


@@ -147,9 +140,7 @@ class InputRawUDP(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""

-    type: Annotated[
-        Optional[InputRawUDPType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputRawUDPType] = None

     disabled: Optional[bool] = False

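The recurring change in every model diff in this release is the same: the generated enums drop the utils.OpenEnumMeta metaclass, and the enum-typed fields drop the PlainValidator(validate_open_enum(False)) wrapper, leaving plain closed str/Enum types. The sketch below is not code from cribl_control_plane; it uses hypothetical PqMode, OldStylePq, and NewStylePq classes and assumes pydantic v2 semantics, to illustrate the likely practical effect: values outside the enum, which an open-enum validator could tolerate, now fail validation.

# Minimal sketch (hypothetical names, assuming pydantic v2) of the open-enum vs.
# closed-enum behavior implied by this diff. Not taken from cribl_control_plane.
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, ValidationError
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class PqMode(str, Enum):
    SMART = "smart"
    ALWAYS = "always"


def _lenient(value: object) -> object:
    # Rough analogue of an "open enum" validator: known values become enum
    # members, anything else is passed through unchanged.
    if value is None or isinstance(value, PqMode):
        return value
    try:
        return PqMode(value)
    except ValueError:
        return value


class OldStylePq(BaseModel):
    # 0.0.16-style field: tolerant of values outside the enum
    mode: Annotated[Optional[Union[PqMode, str]], PlainValidator(_lenient)] = (
        PqMode.ALWAYS
    )


class NewStylePq(BaseModel):
    # 0.0.18-style field: a plain closed enum
    mode: Optional[PqMode] = PqMode.ALWAYS


print(OldStylePq(mode="smart").mode)  # PqMode.SMART
print(OldStylePq(mode="turbo").mode)  # 'turbo' is passed through
print(NewStylePq(mode="smart").mode)  # PqMode.SMART
try:
    NewStylePq(mode="turbo")
except ValidationError as err:
    print(err.errors()[0]["type"])  # 'enum' -- unknown values are now rejected

If calling code relied on unrecognized enum values surviving deserialization in 0.0.16, that behavior should be re-verified against 0.0.18.
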
cribl_control_plane/models/inputs3.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputS3Type(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Type(str, Enum):
     S3 = "s3"


@@ -26,14 +23,14 @@ class InputS3Connection(BaseModel):
     pipeline: Optional[str] = None


-class InputS3Mode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Mode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3Compression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputS3PqTypedDict(TypedDict):


 class InputS3Pq(BaseModel):
-    mode: Annotated[
-        Optional[InputS3Mode], PlainValidator(validate_open_enum(False))
-    ] = InputS3Mode.ALWAYS
+    mode: Optional[InputS3Mode] = InputS3Mode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputS3Pq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputS3Compression], PlainValidator(validate_open_enum(False))
-    ] = InputS3Compression.NONE
+    compress: Optional[InputS3Compression] = InputS3Compression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3AuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -98,7 +91,7 @@ class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"


-class InputS3SignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3SignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
@@ -237,7 +230,7 @@ class InputS3TypedDict(TypedDict):


 class InputS3(BaseModel):
-    type: Annotated[InputS3Type, PlainValidator(validate_open_enum(False))]
+    type: InputS3Type

     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -278,10 +271,7 @@ class InputS3(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputS3AuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3AuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3AuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -297,10 +287,7 @@ class InputS3(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputS3SignatureVersion], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputS3SignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputS3SignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""

cribl_control_plane/models/inputs3inventory.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputS3InventoryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryType(str, Enum):
     S3_INVENTORY = "s3_inventory"


@@ -26,14 +23,14 @@ class InputS3InventoryConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputS3InventoryMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputS3InventoryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputS3InventoryPqTypedDict(TypedDict):


 class InputS3InventoryPq(BaseModel):
-    mode: Annotated[
-        Optional[InputS3InventoryMode], PlainValidator(validate_open_enum(False))
-    ] = InputS3InventoryMode.ALWAYS
+    mode: Optional[InputS3InventoryMode] = InputS3InventoryMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputS3InventoryPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputS3InventoryCompression], PlainValidator(validate_open_enum(False))
-    ] = InputS3InventoryCompression.NONE
+    compress: Optional[InputS3InventoryCompression] = InputS3InventoryCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -98,7 +91,7 @@ class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMe
     SECRET = "secret"


-class InputS3InventorySignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventorySignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
@@ -151,7 +144,7 @@ class InputS3InventoryCheckpointing(BaseModel):
     r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""


-class InputS3InventoryTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputS3InventoryTagAfterProcessing(str, Enum):
     FALSE = "false"
     TRUE = "true"

@@ -245,7 +238,7 @@ class InputS3InventoryTypedDict(TypedDict):


 class InputS3Inventory(BaseModel):
-    type: Annotated[InputS3InventoryType, PlainValidator(validate_open_enum(False))]
+    type: InputS3InventoryType

     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -286,10 +279,7 @@ class InputS3Inventory(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputS3InventoryAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventoryAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputS3InventoryAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -305,10 +295,7 @@ class InputS3Inventory(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputS3InventorySignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventorySignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputS3InventorySignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -422,10 +409,7 @@ class InputS3Inventory(BaseModel):
     r"""Select or create a stored secret that references your access key and secret key"""

     tag_after_processing: Annotated[
-        Annotated[
-            Optional[InputS3InventoryTagAfterProcessing],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputS3InventoryTagAfterProcessing],
         pydantic.Field(alias="tagAfterProcessing"),
     ] = None

cribl_control_plane/models/inputsecuritylake.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSecurityLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeType(str, Enum):
     SECURITY_LAKE = "security_lake"


@@ -26,14 +23,14 @@ class InputSecurityLakeConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSecurityLakeMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSecurityLakeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSecurityLakePqTypedDict(TypedDict):


 class InputSecurityLakePq(BaseModel):
-    mode: Annotated[
-        Optional[InputSecurityLakeMode], PlainValidator(validate_open_enum(False))
-    ] = InputSecurityLakeMode.ALWAYS
+    mode: Optional[InputSecurityLakeMode] = InputSecurityLakeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,14 +79,11 @@ class InputSecurityLakePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSecurityLakeCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputSecurityLakeCompression.NONE
+    compress: Optional[InputSecurityLakeCompression] = InputSecurityLakeCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -99,7 +91,7 @@ class InputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumM
     SECRET = "secret"


-class InputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeSignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
@@ -152,7 +144,7 @@ class InputSecurityLakeCheckpointing(BaseModel):
     r"""The number of times to retry processing when a processing error occurs. If Skip file on error is enabled, this setting is ignored."""


-class InputSecurityLakeTagAfterProcessing(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSecurityLakeTagAfterProcessing(str, Enum):
     FALSE = "false"
     TRUE = "true"

@@ -242,7 +234,7 @@ class InputSecurityLakeTypedDict(TypedDict):


 class InputSecurityLake(BaseModel):
-    type: Annotated[InputSecurityLakeType, PlainValidator(validate_open_enum(False))]
+    type: InputSecurityLakeType

     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read notifications from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
@@ -283,10 +275,7 @@ class InputSecurityLake(BaseModel):
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputSecurityLakeAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -302,10 +291,7 @@ class InputSecurityLake(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = InputSecurityLakeSignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -407,10 +393,7 @@ class InputSecurityLake(BaseModel):
     r"""Select or create a stored secret that references your access key and secret key"""

     tag_after_processing: Annotated[
-        Annotated[
-            Optional[InputSecurityLakeTagAfterProcessing],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputSecurityLakeTagAfterProcessing],
         pydantic.Field(alias="tagAfterProcessing"),
     ] = None

cribl_control_plane/models/inputsnmp.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSnmpType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpType(str, Enum):
     SNMP = "snmp"


@@ -26,14 +23,14 @@ class InputSnmpConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSnmpMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSnmpCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSnmpCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSnmpPqTypedDict(TypedDict):


 class InputSnmpPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSnmpMode], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpMode.ALWAYS
+    mode: Optional[InputSnmpMode] = InputSnmpMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputSnmpPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSnmpCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSnmpCompression.NONE
+    compress: Optional[InputSnmpCompression] = InputSnmpCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class AuthenticationProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
+class AuthenticationProtocol(str, Enum):
     NONE = "none"
     MD5 = "md5"
     SHA = "sha"
@@ -111,10 +104,7 @@ class V3User(BaseModel):
     name: str

     auth_protocol: Annotated[
-        Annotated[
-            Optional[AuthenticationProtocol], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="authProtocol"),
+        Optional[AuthenticationProtocol], pydantic.Field(alias="authProtocol")
     ] = AuthenticationProtocol.NONE

     auth_key: Annotated[Optional[Any], pydantic.Field(alias="authKey")] = None
@@ -206,9 +196,7 @@ class InputSnmp(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""

-    type: Annotated[
-        Optional[InputSnmpType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSnmpType] = None

     disabled: Optional[bool] = False

cribl_control_plane/models/inputsplunk.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class InputSplunkType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkType(str, Enum):
     SPLUNK = "splunk"


@@ -26,14 +23,14 @@ class InputSplunkConnection(BaseModel):
     pipeline: Optional[str] = None


-class InputSplunkMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     SMART = "smart"
     ALWAYS = "always"


-class InputSplunkPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
@@ -58,9 +55,7 @@ class InputSplunkPqTypedDict(TypedDict):


 class InputSplunkPq(BaseModel):
-    mode: Annotated[
-        Optional[InputSplunkMode], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkMode.ALWAYS
+    mode: Optional[InputSplunkMode] = InputSplunkMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputSplunkPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

-    compress: Annotated[
-        Optional[InputSplunkPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkPqCompression.NONE
+    compress: Optional[InputSplunkPqCompression] = InputSplunkPqCompression.NONE
     r"""Codec to use to compress the persisted data"""


-class InputSplunkMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class InputSplunkMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -156,19 +149,11 @@ class InputSplunkTLSSettingsServerSide(BaseModel):
     ] = None

     min_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputSplunkMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[InputSplunkMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputSplunkMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


@@ -198,14 +183,14 @@ class InputSplunkAuthToken(BaseModel):
     description: Optional[str] = None


-class InputSplunkMaxS2SVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkMaxS2SVersion(str, Enum):
     r"""The highest S2S protocol version to advertise during handshake"""

     V3 = "v3"
     V4 = "v4"


-class InputSplunkCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputSplunkCompression(str, Enum):
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""

     DISABLED = "disabled"
@@ -276,9 +261,7 @@ class InputSplunk(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""

-    type: Annotated[
-        Optional[InputSplunkType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputSplunkType] = None

     disabled: Optional[bool] = False

@@ -358,11 +341,7 @@ class InputSplunk(BaseModel):
     r"""Shared secrets to be provided by any Splunk forwarder. If empty, unauthorized access is permitted."""

     max_s2_sversion: Annotated[
-        Annotated[
-            Optional[InputSplunkMaxS2SVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxS2Sversion"),
+        Optional[InputSplunkMaxS2SVersion], pydantic.Field(alias="maxS2Sversion")
     ] = InputSplunkMaxS2SVersion.V3
     r"""The highest S2S protocol version to advertise during handshake"""

@@ -383,7 +362,5 @@ class InputSplunk(BaseModel):
     ] = False
     r"""Extract and process Splunk-generated metrics as Cribl metrics"""

-    compress: Annotated[
-        Optional[InputSplunkCompression], PlainValidator(validate_open_enum(False))
-    ] = InputSplunkCompression.DISABLED
+    compress: Optional[InputSplunkCompression] = InputSplunkCompression.DISABLED
     r"""Controls whether to support reading compressed data from a forwarder. Select 'Automatic' to match the forwarder's configuration, or 'Disabled' to reject compressed connections."""