cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (144)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputkafka.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputKafkaType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaType(str, Enum):
     KAFKA = "kafka"
 
 
@@ -26,14 +23,14 @@ class InputKafkaConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputKafkaPqTypedDict(TypedDict):
 
 
 class InputKafkaPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKafkaMode], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaMode.ALWAYS
+    mode: Optional[InputKafkaMode] = InputKafkaMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputKafkaPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKafkaCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaCompression.NONE
+    compress: Optional[InputKafkaCompression] = InputKafkaCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -109,18 +102,14 @@ class InputKafkaAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -180,18 +169,12 @@ class InputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
@@ -238,7 +221,7 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputKafkaKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaSASLMechanism(str, Enum):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -257,19 +240,17 @@ class InputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Annotated[
-        Optional[InputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
-    ] = InputKafkaSASLMechanism.PLAIN
+    mechanism: Optional[InputKafkaSASLMechanism] = InputKafkaSASLMechanism.PLAIN
 
 
-class InputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKafkaMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -329,19 +310,11 @@ class InputKafkaTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputKafkaMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputKafkaMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputKafkaMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputKafkaMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -359,13 +332,13 @@ class InputKafkaMetadatum(BaseModel):
 
 
 class InputKafkaTypedDict(TypedDict):
-    type: InputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputKafkaType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -439,8 +412,6 @@ class InputKafkaTypedDict(TypedDict):
 
 
 class InputKafka(BaseModel):
-    type: Annotated[InputKafkaType, PlainValidator(validate_open_enum(False))]
-
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
 
@@ -450,6 +421,8 @@ class InputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Optional[InputKafkaType] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
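
The recurring change in this diff is the removal of Speakeasy's open-enum machinery: enum classes no longer use metaclass=utils.OpenEnumMeta, and fields lose their PlainValidator(validate_open_enum(False)) wrappers. Speakeasy's open enums generally let unlisted values pass validation as bare strings, so the closed enums in 0.0.17 are stricter. A minimal sketch of the difference, assuming pydantic v2 and an InputKafkaPq trimmed to one field (the gzip member is added here purely for illustration):

# Sketch only -- assumes pydantic v2; not the full generated model.
from enum import Enum
from typing import Optional

import pydantic


class InputKafkaCompression(str, Enum):
    # 0.0.17 shape: a plain closed enum, no OpenEnumMeta metaclass
    NONE = "none"
    GZIP = "gzip"  # hypothetical member, for illustration


class InputKafkaPq(pydantic.BaseModel):
    # 0.0.17 shape: no PlainValidator(validate_open_enum(False)) wrapper
    compress: Optional[InputKafkaCompression] = InputKafkaCompression.NONE


# Listed values still coerce to enum members, as in 0.0.15:
pq = InputKafkaPq.model_validate({"compress": "gzip"})
assert pq.compress is InputKafkaCompression.GZIP

# Unlisted values now fail validation, where the 0.0.15 open-enum
# validator would generally have passed them through as strings:
try:
    InputKafkaPq.model_validate({"compress": "zstd"})
except pydantic.ValidationError as err:
    print(f"rejected: {err.error_count()} error(s)")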
cribl_control_plane/models/inputkinesis.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputKinesisType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisType(str, Enum):
     KINESIS = "kinesis"
 
 
@@ -26,14 +23,14 @@ class InputKinesisConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKinesisMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKinesisCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputKinesisPqTypedDict(TypedDict):
 
 
 class InputKinesisPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKinesisMode], PlainValidator(validate_open_enum(False))
-    ] = InputKinesisMode.ALWAYS
+    mode: Optional[InputKinesisMode] = InputKinesisMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,20 +79,18 @@ class InputKinesisPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKinesisCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKinesisCompression.NONE
+    compress: Optional[InputKinesisCompression] = InputKinesisCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class ShardIteratorStart(str, Enum, metaclass=utils.OpenEnumMeta):
+class ShardIteratorStart(str, Enum):
     r"""Location at which to start reading a shard for the first time"""
 
     TRIM_HORIZON = "TRIM_HORIZON"
     LATEST = "LATEST"
 
 
-class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisRecordDataFormat(str, Enum):
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
     CRIBL = "cribl"
@@ -106,14 +99,14 @@ class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     LINE = "line"
 
 
-class ShardLoadBalancing(str, Enum, metaclass=utils.OpenEnumMeta):
+class ShardLoadBalancing(str, Enum):
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
     CONSISTENT_HASHING = "ConsistentHashing"
     ROUND_ROBIN = "RoundRobin"
 
 
-class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
     AUTO = "auto"
@@ -121,7 +114,7 @@ class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"
 
 
-class InputKinesisSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKinesisSignatureVersion(str, Enum):
     r"""Signature version to use for signing Kinesis stream requests"""
 
     V2 = "v2"
@@ -142,13 +135,13 @@ class InputKinesisMetadatum(BaseModel):
 
 
 class InputKinesisTypedDict(TypedDict):
-    type: InputKinesisType
     stream_name: str
     r"""Kinesis Data Stream to read data from"""
     region: str
     r"""Region where the Kinesis stream is located"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputKinesisType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -209,8 +202,6 @@ class InputKinesisTypedDict(TypedDict):
 
 
 class InputKinesis(BaseModel):
-    type: Annotated[InputKinesisType, PlainValidator(validate_open_enum(False))]
-
     stream_name: Annotated[str, pydantic.Field(alias="streamName")]
     r"""Kinesis Data Stream to read data from"""
 
@@ -220,6 +211,8 @@ class InputKinesis(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Optional[InputKinesisType] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -253,19 +246,12 @@ class InputKinesis(BaseModel):
     r"""A JavaScript expression to be called with each shardId for the stream. If the expression evaluates to a truthy value, the shard will be processed."""
 
     shard_iterator_type: Annotated[
-        Annotated[
-            Optional[ShardIteratorStart], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="shardIteratorType"),
+        Optional[ShardIteratorStart], pydantic.Field(alias="shardIteratorType")
     ] = ShardIteratorStart.TRIM_HORIZON
     r"""Location at which to start reading a shard for the first time"""
 
     payload_format: Annotated[
-        Annotated[
-            Optional[InputKinesisRecordDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="payloadFormat"),
+        Optional[InputKinesisRecordDataFormat], pydantic.Field(alias="payloadFormat")
     ] = InputKinesisRecordDataFormat.CRIBL
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
@@ -280,18 +266,12 @@ class InputKinesis(BaseModel):
     r"""Maximum number of records, across all shards, to pull down at once per Worker Process"""
 
     load_balancing_algorithm: Annotated[
-        Annotated[
-            Optional[ShardLoadBalancing], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="loadBalancingAlgorithm"),
+        Optional[ShardLoadBalancing], pydantic.Field(alias="loadBalancingAlgorithm")
     ] = ShardLoadBalancing.CONSISTENT_HASHING
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[InputKinesisAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputKinesisAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputKinesisAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -304,11 +284,7 @@ class InputKinesis(BaseModel):
     r"""Kinesis stream service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Kinesis stream-compatible endpoint."""
 
     signature_version: Annotated[
-        Annotated[
-            Optional[InputKinesisSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[InputKinesisSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = InputKinesisSignatureVersion.V4
     r"""Signature version to use for signing Kinesis stream requests"""
cribl_control_plane/models/inputkubeevents.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputKubeEventsType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeEventsType(str, Enum):
     KUBE_EVENTS = "kube_events"
 
 
@@ -26,14 +23,14 @@ class InputKubeEventsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeEventsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeEventsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeEventsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeEventsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputKubeEventsPqTypedDict(TypedDict):
 
 
 class InputKubeEventsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeEventsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeEventsMode.ALWAYS
+    mode: Optional[InputKubeEventsMode] = InputKubeEventsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputKubeEventsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeEventsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeEventsCompression.NONE
+    compress: Optional[InputKubeEventsCompression] = InputKubeEventsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -147,7 +140,7 @@ class InputKubeEvents(BaseModel):
     id: str
     r"""Unique ID for this input"""
 
-    type: Annotated[InputKubeEventsType, PlainValidator(validate_open_enum(False))]
+    type: InputKubeEventsType
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/inputkubelogs.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputKubeLogsType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsType(str, Enum):
     KUBE_LOGS = "kube_logs"
 
 
@@ -26,14 +23,14 @@ class InputKubeLogsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeLogsPqCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsPqCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputKubeLogsPqTypedDict(TypedDict):
 
 
 class InputKubeLogsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeLogsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeLogsMode.ALWAYS
+    mode: Optional[InputKubeLogsMode] = InputKubeLogsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputKubeLogsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeLogsPqCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeLogsPqCompression.NONE
+    compress: Optional[InputKubeLogsPqCompression] = InputKubeLogsPqCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -118,7 +111,7 @@ class InputKubeLogsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputKubeLogsPersistenceCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeLogsPersistenceCompression(str, Enum):
     r"""Data compression format. Default is gzip."""
 
     NONE = "none"
@@ -151,10 +144,9 @@ class InputKubeLogsDiskSpooling(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""
 
-    compress: Annotated[
-        Optional[InputKubeLogsPersistenceCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputKubeLogsPersistenceCompression.GZIP
+    compress: Optional[InputKubeLogsPersistenceCompression] = (
+        InputKubeLogsPersistenceCompression.GZIP
+    )
     r"""Data compression format. Default is gzip."""
 
 
@@ -198,7 +190,7 @@ class InputKubeLogs(BaseModel):
     id: str
     r"""Unique ID for this input"""
 
-    type: Annotated[InputKubeLogsType, PlainValidator(validate_open_enum(False))]
+    type: InputKubeLogsType
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/inputkubemetrics.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputKubeMetricsType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsType(str, Enum):
     KUBE_METRICS = "kube_metrics"
 
 
@@ -26,14 +23,14 @@ class InputKubeMetricsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKubeMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputKubeMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputKubeMetricsPqTypedDict(TypedDict):
 
 
 class InputKubeMetricsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputKubeMetricsMode], PlainValidator(validate_open_enum(False))
-    ] = InputKubeMetricsMode.ALWAYS
+    mode: Optional[InputKubeMetricsMode] = InputKubeMetricsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputKubeMetricsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputKubeMetricsCompression], PlainValidator(validate_open_enum(False))
-    ] = InputKubeMetricsCompression.NONE
+    compress: Optional[InputKubeMetricsCompression] = InputKubeMetricsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -118,7 +111,7 @@ class InputKubeMetricsMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputKubeMetricsDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputKubeMetricsDataCompressionFormat(str, Enum):
     NONE = "none"
     GZIP = "gzip"
 
@@ -150,10 +143,9 @@ class InputKubeMetricsPersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""
 
-    compress: Annotated[
-        Optional[InputKubeMetricsDataCompressionFormat],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputKubeMetricsDataCompressionFormat.GZIP
+    compress: Optional[InputKubeMetricsDataCompressionFormat] = (
+        InputKubeMetricsDataCompressionFormat.GZIP
+    )
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/kube_metrics"
@@ -193,7 +185,7 @@ class InputKubeMetrics(BaseModel):
     id: str
     r"""Unique ID for this input"""
 
-    type: Annotated[InputKubeMetricsType, PlainValidator(validate_open_enum(False))]
+    type: InputKubeMetricsType
 
     disabled: Optional[bool] = False
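
Note that the three kube_* Sources diverge from the Kafka/Kinesis pattern in one respect: type stays a required field on InputKubeEvents, InputKubeLogs, and InputKubeMetrics; only the open-enum validator around it is dropped. The remaining churn in those files is formatting, such as the parenthesized enum defaults, which is behavior-neutral. A sketch under the same pydantic v2 assumption, with classes trimmed for illustration:

# Sketch only -- assumes pydantic v2; not the full generated model.
from enum import Enum
from typing import Optional

import pydantic


class InputKubeLogsPersistenceCompression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


class InputKubeLogsDiskSpooling(pydantic.BaseModel):
    # The 0.0.17 parenthesized default is identical in behavior to the
    # single-line default it replaces:
    compress: Optional[InputKubeLogsPersistenceCompression] = (
        InputKubeLogsPersistenceCompression.GZIP
    )


assert InputKubeLogsDiskSpooling().compress is InputKubeLogsPersistenceCompression.GZIP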