cribl-control-plane 0.0.15-py3-none-any.whl → 0.0.17-py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Files changed (144)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
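Two entries in the list above are pure renames: item 2 moves cribl_control_plane/outputs.py to destinations.py, and item 101 collapses outputgrafanacloud_union.py into outputgrafanacloud.py. Code that imports either module by path has to follow the rename when upgrading from 0.0.15 to 0.0.17. A minimal compatibility sketch; the module paths come straight from the file list, but treating them as directly importable is an assumption about the generated SDK layout:

# Import shim spanning the outputs.py -> destinations.py rename.
# Module names are taken from the file list above; everything else is illustrative.
try:
    from cribl_control_plane import destinations  # 0.0.17 and later
except ImportError:
    from cribl_control_plane import outputs as destinations  # 0.0.15 and earlier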
cribl_control_plane/models/outputconfluentcloud.py
@@ -1,28 +1,25 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputConfluentCloudType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudType(str, Enum):
     CONFLUENT_CLOUD = "confluent_cloud"


-class OutputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -82,23 +79,17 @@ class OutputConfluentCloudTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None


-class OutputConfluentCloudAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudAcknowledgments(int, Enum):
     r"""Control the number of required acknowledgments."""

     ONE = 1
@@ -106,7 +97,7 @@ class OutputConfluentCloudAcknowledgments(int, Enum, metaclass=utils.OpenEnumMet
     MINUS_1 = -1


-class OutputConfluentCloudRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudRecordDataFormat(str, Enum):
     r"""Format to use to serialize events before writing to Kafka."""

     JSON = "json"
@@ -114,7 +105,7 @@ class OutputConfluentCloudRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMe
     PROTOBUF = "protobuf"


-class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudCompression(str, Enum):
     r"""Codec to use to compress the data before sending to Kafka"""

     NONE = "none"
@@ -142,18 +133,14 @@ class OutputConfluentCloudAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""


-class OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -213,18 +200,12 @@ class OutputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None

@@ -287,7 +268,7 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""


-class OutputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudSASLMechanism(str, Enum):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -306,13 +287,12 @@ class OutputConfluentCloudAuthentication(BaseModel):

     disabled: Optional[bool] = True

-    mechanism: Annotated[
-        Optional[OutputConfluentCloudSASLMechanism],
-        PlainValidator(validate_open_enum(False)),
-    ] = OutputConfluentCloudSASLMechanism.PLAIN
+    mechanism: Optional[OutputConfluentCloudSASLMechanism] = (
+        OutputConfluentCloudSASLMechanism.PLAIN
+    )


-class OutputConfluentCloudBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -320,23 +300,21 @@ class OutputConfluentCloudBackpressureBehavior(str, Enum, metaclass=utils.OpenEn
     QUEUE = "queue"


-class OutputConfluentCloudPqCompressCompression(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputConfluentCloudPqCompressCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputConfluentCloudQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputConfluentCloudMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -432,9 +410,7 @@ class OutputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""

-    type: Annotated[
-        Optional[OutputConfluentCloudType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[OutputConfluentCloudType] = None

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -452,25 +428,19 @@ class OutputConfluentCloud(BaseModel):

     tls: Optional[OutputConfluentCloudTLSSettingsClientSide] = None

-    ack: Annotated[
-        Optional[OutputConfluentCloudAcknowledgments],
-        PlainValidator(validate_open_enum(True)),
-    ] = OutputConfluentCloudAcknowledgments.ONE
+    ack: Optional[OutputConfluentCloudAcknowledgments] = (
+        OutputConfluentCloudAcknowledgments.ONE
+    )
     r"""Control the number of required acknowledgments."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudRecordDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputConfluentCloudRecordDataFormat], pydantic.Field(alias="format")
     ] = OutputConfluentCloudRecordDataFormat.JSON
     r"""Format to use to serialize events before writing to Kafka."""

-    compression: Annotated[
-        Optional[OutputConfluentCloudCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = OutputConfluentCloudCompression.GZIP
+    compression: Optional[OutputConfluentCloudCompression] = (
+        OutputConfluentCloudCompression.GZIP
+    )
     r"""Codec to use to compress the data before sending to Kafka"""

     max_record_size_kb: Annotated[
@@ -531,10 +501,7 @@ class OutputConfluentCloud(BaseModel):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputConfluentCloudBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -560,29 +527,19 @@ class OutputConfluentCloud(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudPqCompressCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudPqCompressCompression],
         pydantic.Field(alias="pqCompress"),
     ] = OutputConfluentCloudPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputConfluentCloudQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputConfluentCloudQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudMode],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputConfluentCloudMode], pydantic.Field(alias="pqMode")
     ] = OutputConfluentCloudMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
 
cribl_control_plane/models/outputcriblhttp.py
@@ -1,28 +1,25 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputCriblHTTPType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPType(str, Enum):
     CRIBL_HTTP = "cribl_http"


-class OutputCriblHTTPMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputCriblHTTPMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -82,23 +79,15 @@ class OutputCriblHTTPTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[OutputCriblHTTPMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[OutputCriblHTTPMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


-class OutputCriblHTTPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPCompression(str, Enum):
     r"""Codec to use to compress the data before sending"""

     NONE = "none"
@@ -116,7 +105,7 @@ class OutputCriblHTTPExtraHTTPHeader(BaseModel):
     name: Optional[str] = None


-class OutputCriblHTTPFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPFailedRequestLoggingMode(str, Enum):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""

     PAYLOAD = "payload"
@@ -178,7 +167,7 @@ class OutputCriblHTTPTimeoutRetrySettings(BaseModel):
     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""


-class OutputCriblHTTPBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -201,21 +190,21 @@ class OutputCriblHTTPURL(BaseModel):
     r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""


-class OutputCriblHTTPPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPPqCompressCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputCriblHTTPQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputCriblHTTPMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblHTTPMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -313,7 +302,7 @@ class OutputCriblHTTP(BaseModel):
     id: str
     r"""Unique ID for this output"""

-    type: Annotated[OutputCriblHTTPType, PlainValidator(validate_open_enum(False))]
+    type: OutputCriblHTTPType

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -346,9 +335,7 @@ class OutputCriblHTTP(BaseModel):
     ] = None
     r"""Fields to exclude from the event. By default, all internal fields except `__output` are sent. Example: `cribl_pipe`, `c*`. Wildcards supported."""

-    compression: Annotated[
-        Optional[OutputCriblHTTPCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputCriblHTTPCompression.GZIP
+    compression: Optional[OutputCriblHTTPCompression] = OutputCriblHTTPCompression.GZIP
     r"""Codec to use to compress the data before sending"""

     concurrency: Optional[float] = 5
@@ -387,10 +374,7 @@ class OutputCriblHTTP(BaseModel):
     r"""Headers to add to all events"""

     failed_request_logging_mode: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPFailedRequestLoggingMode],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblHTTPFailedRequestLoggingMode],
         pydantic.Field(alias="failedRequestLoggingMode"),
     ] = OutputCriblHTTPFailedRequestLoggingMode.NONE
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
@@ -417,10 +401,7 @@ class OutputCriblHTTP(BaseModel):
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblHTTPBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputCriblHTTPBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -464,28 +445,19 @@ class OutputCriblHTTP(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPPqCompressCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblHTTPPqCompressCompression],
         pydantic.Field(alias="pqCompress"),
     ] = OutputCriblHTTPPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblHTTPQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputCriblHTTPQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputCriblHTTPMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputCriblHTTPMode], pydantic.Field(alias="pqMode")
     ] = OutputCriblHTTPMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
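In outputcriblhttp.py the change also reaches a required field: type drops its open-enum validator and becomes a bare OutputCriblHTTPType. Combined with the closed optional enums, a config fetched from a Cribl deployment newer than the SDK can now fail validation where 0.0.15 tolerated unrecognized values. A hedged sketch of one defensive pattern at the call site; both classes mirror the generated shapes and are not imported from the SDK:

from enum import Enum
from typing import Optional

import pydantic


class OutputType(str, Enum):
    # Mirrors OutputCriblHTTPType; 0.0.17 requires an exact member match
    CRIBL_HTTP = "cribl_http"


class Output(pydantic.BaseModel):
    id: str
    type: OutputType


def parse_output(payload: dict) -> Optional[Output]:
    """Parse a destination config, skipping entries this SDK is too old to know."""
    try:
        return Output.model_validate(payload)
    except pydantic.ValidationError:
        return None  # e.g. a type added server-side after this SDK was generated


print(parse_output({"id": "out-1", "type": "cribl_http"}))  # validated model
print(parse_output({"id": "out-2", "type": "cribl_tcp"}))   # None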
 
cribl_control_plane/models/outputcribllake.py
@@ -1,28 +1,25 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputCriblLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeType(str, Enum):
     CRIBL_LAKE = "cribl_lake"


-class OutputCriblLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeSignatureVersion(str, Enum):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputCriblLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeObjectACL(str, Enum):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -34,7 +31,7 @@ class OutputCriblLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


-class OutputCriblLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeStorageClass(str, Enum):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -47,34 +44,32 @@ class OutputCriblLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
     DEEP_ARCHIVE = "DEEP_ARCHIVE"


-class OutputCriblLakeServerSideEncryptionForUploadedObjects(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputCriblLakeServerSideEncryptionForUploadedObjects(str, Enum):
     AES256 = "AES256"
     AWS_KMS = "aws:kms"


-class OutputCriblLakeBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputCriblLakeDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class AwsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class AwsAuthenticationMethod(str, Enum):
     AUTO = "auto"
     AUTO_RPC = "auto_rpc"
     MANUAL = "manual"


-class OutputCriblLakeFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputCriblLakeFormat(str, Enum):
     JSON = "json"
     PARQUET = "parquet"
     DDSS = "ddss"
@@ -174,7 +169,7 @@
     id: str
     r"""Unique ID for this output"""

-    type: Annotated[OutputCriblLakeType, PlainValidator(validate_open_enum(False))]
+    type: OutputCriblLakeType

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -205,10 +200,7 @@
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblLakeSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputCriblLakeSignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""
@@ -257,28 +249,17 @@
     r"""Lake dataset to send the data to."""

     object_acl: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeObjectACL],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="objectACL"),
+        Optional[OutputCriblLakeObjectACL], pydantic.Field(alias="objectACL")
     ] = OutputCriblLakeObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeStorageClass],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="storageClass"),
+        Optional[OutputCriblLakeStorageClass], pydantic.Field(alias="storageClass")
     ] = None
     r"""Storage class to select for uploaded objects"""

     server_side_encryption: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeServerSideEncryptionForUploadedObjects],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblLakeServerSideEncryptionForUploadedObjects],
         pydantic.Field(alias="serverSideEncryption"),
     ] = None

@@ -319,10 +300,7 @@
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblLakeBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputCriblLakeBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -333,10 +311,7 @@
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputCriblLakeDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputCriblLakeDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
@@ -362,17 +337,12 @@
     r"""Maximum number of files that can be waiting for upload before backpressure is applied"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[AwsAuthenticationMethod], PlainValidator(validate_open_enum(False))
-        ],
+        Optional[AwsAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = AwsAuthenticationMethod.AUTO

     format_: Annotated[
-        Annotated[
-            Optional[OutputCriblLakeFormat], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputCriblLakeFormat], pydantic.Field(alias="format")
     ] = None

     max_concurrent_file_parts: Annotated[
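Throughout outputcribllake.py, as in the other files, the camelCase wire aliases (signatureVersion, objectACL, pqMode, and so on) are untouched; only the inner open-enum Annotated wrapper disappears, which is why several fields collapse to a single-line Annotated[...]. Serialization for the API therefore still goes through the alias. A mirrored sketch; populate_by_name stands in for whatever cribl_control_plane.types.BaseModel configures, which this diff does not show:

from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class SignatureVersion(str, Enum):
    # Mirrors OutputCriblLakeSignatureVersion
    V2 = "v2"
    V4 = "v4"


class Lake(pydantic.BaseModel):
    # Assumed: the SDK's shared BaseModel allows population by field name
    model_config = pydantic.ConfigDict(populate_by_name=True)

    signature_version: Annotated[
        Optional[SignatureVersion], pydantic.Field(alias="signatureVersion")
    ] = SignatureVersion.V4


lake = Lake.model_validate({"signatureVersion": "v2"})  # wire-format key
print(lake.signature_version)                           # SignatureVersion.V2
print(lake.model_dump(mode="json", by_alias=True))      # {'signatureVersion': 'v2'}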