cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of cribl-control-plane has been flagged as potentially problematic.

Files changed (144):
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
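
Most of the churn across these 144 files follows one mechanical pattern: 0.0.17 drops the utils.OpenEnumMeta metaclass and the PlainValidator(validate_open_enum(...)) wrappers from the generated models, turning the SDK's open enums into plain str/int enums, and outputs.py is renamed to destinations.py. Representative diffs for outputminio.py, outputmsk.py, and outputnetflow.py follow below. The visible consequence of the enum change is stricter validation: values outside the declared members are no longer passed through. A minimal sketch, assuming these enum classes are re-exported from cribl_control_plane.models (the regenerated models/__init__.py suggests they are):

from cribl_control_plane.models import OutputMinioDataFormat  # assumed re-export

# Declared values still resolve to enum members, as they did in 0.0.15.
fmt = OutputMinioDataFormat("json")
assert fmt is OutputMinioDataFormat.JSON

# Undeclared values now raise ValueError, because the class is a plain str
# Enum in 0.0.17; the 0.0.15 open-enum machinery appears to have been built
# to tolerate them.
try:
    OutputMinioDataFormat("zstd")
except ValueError:
    pass  # expected under 0.0.17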

cribl_control_plane/models/outputminio.py (0.0.15 → 0.0.17)

@@ -1,21 +1,18 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputMinioType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioType(str, Enum):
     MINIO = "minio"


-class OutputMinioAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -23,14 +20,14 @@ class OutputMinioAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"


-class OutputMinioSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioSignatureVersion(str, Enum):
     r"""Signature version to use for signing MinIO requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputMinioObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioObjectACL(str, Enum):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -42,20 +39,20 @@ class OutputMinioObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


-class OutputMinioStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioStorageClass(str, Enum):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
     REDUCED_REDUNDANCY = "REDUCED_REDUNDANCY"


-class ServerSideEncryption(str, Enum, metaclass=utils.OpenEnumMeta):
+class ServerSideEncryption(str, Enum):
     r"""Server-side encryption for uploaded objects"""

     AES256 = "AES256"


-class OutputMinioDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioDataFormat(str, Enum):
     r"""Format of the output data"""

     JSON = "json"
@@ -63,28 +60,28 @@ class OutputMinioDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET = "parquet"


-class OutputMinioBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputMinioDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputMinioCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioCompression(str, Enum):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputMinioCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioCompressionLevel(str, Enum):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -92,7 +89,7 @@ class OutputMinioCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     BEST_COMPRESSION = "best_compression"


-class OutputMinioParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioParquetVersion(str, Enum):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -100,7 +97,7 @@ class OutputMinioParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputMinioDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMinioDataPageVersion(str, Enum):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -237,9 +234,7 @@ class OutputMinio(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""

-    type: Annotated[
-        Optional[OutputMinioType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[OutputMinioType] = None

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -256,10 +251,7 @@ class OutputMinio(BaseModel):
     r"""Tags for filtering and grouping in @{product}"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[OutputMinioAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMinioAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputMinioAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -286,35 +278,22 @@ class OutputMinio(BaseModel):
     r"""Root directory to prepend to path before uploading. Enter a constant, or a JavaScript expression enclosed in quotes or backticks."""

     signature_version: Annotated[
-        Annotated[
-            Optional[OutputMinioSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[OutputMinioSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = OutputMinioSignatureVersion.V4
     r"""Signature version to use for signing MinIO requests"""

     object_acl: Annotated[
-        Annotated[
-            Optional[OutputMinioObjectACL], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="objectACL"),
+        Optional[OutputMinioObjectACL], pydantic.Field(alias="objectACL")
     ] = OutputMinioObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Annotated[
-            Optional[OutputMinioStorageClass], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="storageClass"),
+        Optional[OutputMinioStorageClass], pydantic.Field(alias="storageClass")
     ] = None
     r"""Storage class to select for uploaded objects"""

     server_side_encryption: Annotated[
-        Annotated[
-            Optional[ServerSideEncryption], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="serverSideEncryption"),
+        Optional[ServerSideEncryption], pydantic.Field(alias="serverSideEncryption")
     ] = None
     r"""Server-side encryption for uploaded objects"""

@@ -344,10 +323,7 @@ class OutputMinio(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputMinioDataFormat], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputMinioDataFormat], pydantic.Field(alias="format")
     ] = OutputMinioDataFormat.JSON
     r"""Format of the output data"""

@@ -380,10 +356,7 @@ class OutputMinio(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputMinioBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMinioBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputMinioBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -394,10 +367,7 @@ class OutputMinio(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputMinioDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMinioDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputMinioDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
@@ -425,17 +395,11 @@ class OutputMinio(BaseModel):
     aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
     r"""Select or create a stored secret that references your access key and secret key"""

-    compress: Annotated[
-        Optional[OutputMinioCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputMinioCompression.GZIP
+    compress: Optional[OutputMinioCompression] = OutputMinioCompression.GZIP
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Annotated[
-            Optional[OutputMinioCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="compressionLevel"),
+        Optional[OutputMinioCompressionLevel], pydantic.Field(alias="compressionLevel")
     ] = OutputMinioCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""

@@ -445,19 +409,12 @@ class OutputMinio(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Annotated[
-            Optional[OutputMinioParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="parquetVersion"),
+        Optional[OutputMinioParquetVersion], pydantic.Field(alias="parquetVersion")
     ] = OutputMinioParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputMinioDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMinioDataPageVersion],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputMinioDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

cribl_control_plane/models/outputmsk.py (0.0.15 → 0.0.17)

@@ -1,21 +1,18 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputMskType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskType(str, Enum):
     MSK = "msk"


-class OutputMskAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskAcknowledgments(int, Enum):
     r"""Control the number of required acknowledgments."""

     ONE = 1
@@ -23,7 +20,7 @@ class OutputMskAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     MINUS_1 = -1


-class OutputMskRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskRecordDataFormat(str, Enum):
     r"""Format to use to serialize events before writing to Kafka."""

     JSON = "json"
@@ -31,7 +28,7 @@ class OutputMskRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     PROTOBUF = "protobuf"


-class OutputMskCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskCompression(str, Enum):
     r"""Codec to use to compress the data before sending to Kafka"""

     NONE = "none"
@@ -59,18 +56,14 @@ class OutputMskAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""


-class OutputMskKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputMskKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputMskKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputMskKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -130,18 +123,12 @@ class OutputMskKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Annotated[
-            Optional[OutputMskKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMskKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[OutputMskKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMskKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None

@@ -202,7 +189,7 @@ class OutputMskKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""


-class OutputMskAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskAuthenticationMethod(str, Enum):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -210,21 +197,21 @@ class OutputMskAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     SECRET = "secret"


-class OutputMskSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskSignatureVersion(str, Enum):
     r"""Signature version to use for signing MSK cluster requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputMskMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"


-class OutputMskMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -284,23 +271,15 @@ class OutputMskTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""

     min_version: Annotated[
-        Annotated[
-            Optional[OutputMskMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[OutputMskMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None

     max_version: Annotated[
-        Annotated[
-            Optional[OutputMskMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[OutputMskMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None


-class OutputMskBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -308,21 +287,21 @@ class OutputMskBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     QUEUE = "queue"


-class OutputMskPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskPqCompressCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputMskQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputMskMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputMskMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -443,9 +422,7 @@ class OutputMsk(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""

-    type: Annotated[
-        Optional[OutputMskType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[OutputMskType] = None

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -461,23 +438,15 @@ class OutputMsk(BaseModel):
     streamtags: Optional[List[str]] = None
     r"""Tags for filtering and grouping in @{product}"""

-    ack: Annotated[
-        Optional[OutputMskAcknowledgments], PlainValidator(validate_open_enum(True))
-    ] = OutputMskAcknowledgments.ONE
+    ack: Optional[OutputMskAcknowledgments] = OutputMskAcknowledgments.ONE
     r"""Control the number of required acknowledgments."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputMskRecordDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputMskRecordDataFormat], pydantic.Field(alias="format")
     ] = OutputMskRecordDataFormat.JSON
     r"""Format to use to serialize events before writing to Kafka."""

-    compression: Annotated[
-        Optional[OutputMskCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputMskCompression.GZIP
+    compression: Optional[OutputMskCompression] = OutputMskCompression.GZIP
     r"""Codec to use to compress the data before sending to Kafka"""

     max_record_size_kb: Annotated[
@@ -535,10 +504,7 @@ class OutputMsk(BaseModel):
     r"""Specifies a time window during which @{product} can reauthenticate if needed. Creates the window measuring backward from the moment when credentials are set to expire."""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[OutputMskAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputMskAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputMskAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -551,11 +517,7 @@ class OutputMsk(BaseModel):
     r"""MSK cluster service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to MSK cluster-compatible endpoint."""

     signature_version: Annotated[
-        Annotated[
-            Optional[OutputMskSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="signatureVersion"),
+        Optional[OutputMskSignatureVersion], pydantic.Field(alias="signatureVersion")
     ] = OutputMskSignatureVersion.V4
     r"""Signature version to use for signing MSK cluster requests"""

@@ -592,11 +554,7 @@ class OutputMsk(BaseModel):
     tls: Optional[OutputMskTLSSettingsClientSide] = None

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputMskBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="onBackpressure"),
+        Optional[OutputMskBackpressureBehavior], pydantic.Field(alias="onBackpressure")
     ] = OutputMskBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""

@@ -626,27 +584,18 @@ class OutputMsk(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputMskPqCompressCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqCompress"),
+        Optional[OutputMskPqCompressCompression], pydantic.Field(alias="pqCompress")
     ] = OutputMskPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputMskQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqOnBackpressure"),
+        Optional[OutputMskQueueFullBehavior], pydantic.Field(alias="pqOnBackpressure")
     ] = OutputMskQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

-    pq_mode: Annotated[
-        Annotated[Optional[OutputMskMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputMskMode.ERROR
+    pq_mode: Annotated[Optional[OutputMskMode], pydantic.Field(alias="pqMode")] = (
+        OutputMskMode.ERROR
+    )
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     pq_controls: Annotated[
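
The same tightening applies to the int-valued acknowledgment enum: the 0.0.15 ack field went through validate_open_enum(True), while in 0.0.17 it is constrained to the declared members of OutputMskAcknowledgments. A quick check, again assuming the enum is re-exported from cribl_control_plane.models:

from cribl_control_plane.models import OutputMskAcknowledgments  # assumed re-export

# The declared members keep working.
assert OutputMskAcknowledgments(1) is OutputMskAcknowledgments.ONE
assert OutputMskAcknowledgments(-1) is OutputMskAcknowledgments.MINUS_1

# Any other integer now raises ValueError at lookup/validation time.
try:
    OutputMskAcknowledgments(5)
except ValueError:
    pass  # expected under 0.0.17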

cribl_control_plane/models/outputnetflow.py (0.0.15 → 0.0.17)

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputNetflowType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputNetflowType(str, Enum):
     NETFLOW = "netflow"


@@ -50,7 +47,7 @@ class OutputNetflowTypedDict(TypedDict):


 class OutputNetflow(BaseModel):
-    type: Annotated[OutputNetflowType, PlainValidator(validate_open_enum(False))]
+    type: OutputNetflowType

     hosts: List[OutputNetflowHost]
     r"""One or more NetFlow destinations to forward events to"""