cribl-control-plane 0.0.49__py3-none-any.whl → 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (166)
  1. cribl_control_plane/_version.py +4 -6
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/health.py +6 -2
  4. cribl_control_plane/models/__init__.py +18 -3
  5. cribl_control_plane/models/appmode.py +2 -1
  6. cribl_control_plane/models/cacheconnection.py +10 -2
  7. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  8. cribl_control_plane/models/cloudprovider.py +2 -1
  9. cribl_control_plane/models/configgroup.py +7 -2
  10. cribl_control_plane/models/configgroupcloud.py +6 -2
  11. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  12. cribl_control_plane/models/createinputhectokenbyidop.py +6 -5
  13. cribl_control_plane/models/createversionpushop.py +5 -5
  14. cribl_control_plane/models/cribllakedataset.py +8 -2
  15. cribl_control_plane/models/datasetmetadata.py +8 -2
  16. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  17. cribl_control_plane/models/error.py +16 -0
  18. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  19. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  20. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  21. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  22. cribl_control_plane/models/gethealthinfoop.py +17 -0
  23. cribl_control_plane/models/getsummaryop.py +7 -2
  24. cribl_control_plane/models/getversionshowop.py +6 -5
  25. cribl_control_plane/models/gitshowresult.py +19 -0
  26. cribl_control_plane/models/hbcriblinfo.py +6 -1
  27. cribl_control_plane/models/healthstatus.py +7 -4
  28. cribl_control_plane/models/inputappscope.py +34 -14
  29. cribl_control_plane/models/inputazureblob.py +17 -6
  30. cribl_control_plane/models/inputcollection.py +11 -4
  31. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  32. cribl_control_plane/models/inputcribl.py +11 -4
  33. cribl_control_plane/models/inputcriblhttp.py +23 -8
  34. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  35. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  36. cribl_control_plane/models/inputcribltcp.py +23 -8
  37. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  38. cribl_control_plane/models/inputdatadogagent.py +24 -8
  39. cribl_control_plane/models/inputdatagen.py +11 -4
  40. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  41. cribl_control_plane/models/inputelastic.py +40 -14
  42. cribl_control_plane/models/inputeventhub.py +15 -6
  43. cribl_control_plane/models/inputexec.py +14 -6
  44. cribl_control_plane/models/inputfile.py +15 -6
  45. cribl_control_plane/models/inputfirehose.py +23 -8
  46. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  47. cribl_control_plane/models/inputgrafana.py +67 -24
  48. cribl_control_plane/models/inputhttp.py +23 -8
  49. cribl_control_plane/models/inputhttpraw.py +23 -8
  50. cribl_control_plane/models/inputjournalfiles.py +12 -4
  51. cribl_control_plane/models/inputkafka.py +46 -16
  52. cribl_control_plane/models/inputkinesis.py +38 -14
  53. cribl_control_plane/models/inputkubeevents.py +11 -4
  54. cribl_control_plane/models/inputkubelogs.py +16 -8
  55. cribl_control_plane/models/inputkubemetrics.py +16 -8
  56. cribl_control_plane/models/inputloki.py +29 -10
  57. cribl_control_plane/models/inputmetrics.py +23 -8
  58. cribl_control_plane/models/inputmodeldriventelemetry.py +32 -10
  59. cribl_control_plane/models/inputmsk.py +53 -18
  60. cribl_control_plane/models/inputnetflow.py +11 -4
  61. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  62. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  63. cribl_control_plane/models/inputoffice365service.py +35 -16
  64. cribl_control_plane/models/inputopentelemetry.py +38 -16
  65. cribl_control_plane/models/inputprometheus.py +50 -18
  66. cribl_control_plane/models/inputprometheusrw.py +30 -10
  67. cribl_control_plane/models/inputrawudp.py +11 -4
  68. cribl_control_plane/models/inputs3.py +21 -8
  69. cribl_control_plane/models/inputs3inventory.py +26 -10
  70. cribl_control_plane/models/inputsecuritylake.py +27 -10
  71. cribl_control_plane/models/inputsnmp.py +16 -6
  72. cribl_control_plane/models/inputsplunk.py +33 -12
  73. cribl_control_plane/models/inputsplunkhec.py +29 -10
  74. cribl_control_plane/models/inputsplunksearch.py +33 -14
  75. cribl_control_plane/models/inputsqs.py +27 -10
  76. cribl_control_plane/models/inputsyslog.py +43 -16
  77. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  78. cribl_control_plane/models/inputsystemstate.py +16 -8
  79. cribl_control_plane/models/inputtcp.py +29 -10
  80. cribl_control_plane/models/inputtcpjson.py +29 -10
  81. cribl_control_plane/models/inputwef.py +37 -14
  82. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  83. cribl_control_plane/models/inputwineventlogs.py +20 -10
  84. cribl_control_plane/models/inputwiz.py +21 -8
  85. cribl_control_plane/models/inputwizwebhook.py +23 -8
  86. cribl_control_plane/models/inputzscalerhec.py +29 -10
  87. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  88. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  89. cribl_control_plane/models/masterworkerentry.py +7 -2
  90. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  91. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  92. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  93. cribl_control_plane/models/nodeupgradestate.py +2 -1
  94. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  95. cribl_control_plane/models/outputazureblob.py +48 -18
  96. cribl_control_plane/models/outputazuredataexplorer.py +73 -28
  97. cribl_control_plane/models/outputazureeventhub.py +40 -18
  98. cribl_control_plane/models/outputazurelogs.py +35 -12
  99. cribl_control_plane/models/outputclickhouse.py +55 -20
  100. cribl_control_plane/models/outputcloudwatch.py +29 -10
  101. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  102. cribl_control_plane/models/outputcriblhttp.py +44 -16
  103. cribl_control_plane/models/outputcribllake.py +46 -16
  104. cribl_control_plane/models/outputcribltcp.py +45 -18
  105. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +49 -14
  106. cribl_control_plane/models/outputdatadog.py +48 -20
  107. cribl_control_plane/models/outputdataset.py +46 -18
  108. cribl_control_plane/models/outputdiskspool.py +7 -2
  109. cribl_control_plane/models/outputdls3.py +68 -24
  110. cribl_control_plane/models/outputdynatracehttp.py +53 -20
  111. cribl_control_plane/models/outputdynatraceotlp.py +55 -22
  112. cribl_control_plane/models/outputelastic.py +43 -18
  113. cribl_control_plane/models/outputelasticcloud.py +36 -12
  114. cribl_control_plane/models/outputexabeam.py +29 -10
  115. cribl_control_plane/models/outputfilesystem.py +39 -14
  116. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  117. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  118. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  119. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  120. cribl_control_plane/models/outputgrafanacloud.py +97 -32
  121. cribl_control_plane/models/outputgraphite.py +31 -14
  122. cribl_control_plane/models/outputhoneycomb.py +35 -12
  123. cribl_control_plane/models/outputhumiohec.py +43 -16
  124. cribl_control_plane/models/outputinfluxdb.py +42 -16
  125. cribl_control_plane/models/outputkafka.py +74 -28
  126. cribl_control_plane/models/outputkinesis.py +40 -16
  127. cribl_control_plane/models/outputloki.py +41 -16
  128. cribl_control_plane/models/outputminio.py +65 -24
  129. cribl_control_plane/models/outputmsk.py +82 -30
  130. cribl_control_plane/models/outputnewrelic.py +43 -18
  131. cribl_control_plane/models/outputnewrelicevents.py +41 -14
  132. cribl_control_plane/models/outputopentelemetry.py +67 -26
  133. cribl_control_plane/models/outputprometheus.py +35 -12
  134. cribl_control_plane/models/outputring.py +19 -8
  135. cribl_control_plane/models/outputs3.py +68 -26
  136. cribl_control_plane/models/outputsecuritylake.py +52 -18
  137. cribl_control_plane/models/outputsentinel.py +45 -18
  138. cribl_control_plane/models/outputsentineloneaisiem.py +50 -18
  139. cribl_control_plane/models/outputservicenow.py +60 -24
  140. cribl_control_plane/models/outputsignalfx.py +37 -14
  141. cribl_control_plane/models/outputsns.py +36 -14
  142. cribl_control_plane/models/outputsplunk.py +60 -24
  143. cribl_control_plane/models/outputsplunkhec.py +35 -12
  144. cribl_control_plane/models/outputsplunklb.py +77 -30
  145. cribl_control_plane/models/outputsqs.py +41 -16
  146. cribl_control_plane/models/outputstatsd.py +30 -14
  147. cribl_control_plane/models/outputstatsdext.py +29 -12
  148. cribl_control_plane/models/outputsumologic.py +35 -12
  149. cribl_control_plane/models/outputsyslog.py +58 -24
  150. cribl_control_plane/models/outputtcpjson.py +52 -20
  151. cribl_control_plane/models/outputwavefront.py +35 -12
  152. cribl_control_plane/models/outputwebhook.py +58 -22
  153. cribl_control_plane/models/outputxsiam.py +35 -14
  154. cribl_control_plane/models/productscore.py +2 -1
  155. cribl_control_plane/models/rbacresource.py +2 -1
  156. cribl_control_plane/models/resourcepolicy.py +4 -2
  157. cribl_control_plane/models/runnablejobcollection.py +30 -13
  158. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  159. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  160. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  161. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  162. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +6 -5
  163. cribl_control_plane/models/workertypes.py +2 -1
  164. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/METADATA +1 -1
  165. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/RECORD +166 -163
  166. {cribl_control_plane-0.0.49.dist-info → cribl_control_plane-0.1.0a1.dist-info}/WHEEL +0 -0
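
Every hunk in the diffs below applies the same generated-code change, the SDK's "open enum" pattern: each `str`-based enum gains `metaclass=utils.OpenEnumMeta`, and every model field typed with one of these enums is wrapped in a nested `Annotated` that attaches `PlainValidator(validate_open_enum(False))`. The apparent intent is forward compatibility: enum-typed fields should accept values the SDK does not yet declare instead of failing validation. A minimal, self-contained sketch of the idea follows; the `OpenEnumMeta` and validator here are toy stand-ins, not the package's actual `utils` implementation, whose exact semantics this diff does not show.

# Toy sketch of the open-enum pattern (not the generated package's own utils).
from enum import Enum, EnumMeta
from typing import Optional

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class OpenEnumMeta(EnumMeta):
    """Enum lookup that returns the raw value instead of raising on unknowns."""

    def __call__(cls, value, *args, **kwargs):
        try:
            return super().__call__(value, *args, **kwargs)
        except ValueError:
            return value  # undeclared member: pass the raw value through


class StorageClass(str, Enum, metaclass=OpenEnumMeta):
    STANDARD = "STANDARD"
    GLACIER = "GLACIER"


class Output(BaseModel):
    # PlainValidator replaces pydantic's built-in enum validation, so unknown
    # strings are kept instead of triggering a ValidationError.
    storage_class: Annotated[
        Optional[StorageClass],
        PlainValidator(lambda v: None if v is None else StorageClass(v)),
    ] = None


print(Output(storage_class="GLACIER").storage_class)          # StorageClass.GLACIER
print(Output(storage_class="EXPRESS_ONEZONE").storage_class)  # 'EXPRESS_ONEZONE'

Run against pydantic v2, the declared value round-trips as an enum member while the undeclared one survives as a plain string.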
cribl_control_plane/models/outputs3.py +68 -26

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,7 +15,7 @@ class OutputS3Type(str, Enum):
     S3 = "s3"


-class OutputS3AuthenticationMethod(str, Enum):
+class OutputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -20,14 +23,14 @@ class OutputS3AuthenticationMethod(str, Enum):
     SECRET = "secret"


-class OutputS3SignatureVersion(str, Enum):
+class OutputS3SignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing S3 requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputS3ObjectACL(str, Enum):
+class OutputS3ObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -39,7 +42,7 @@ class OutputS3ObjectACL(str, Enum):
     BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


-class OutputS3StorageClass(str, Enum):
+class OutputS3StorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -52,12 +55,14 @@ class OutputS3StorageClass(str, Enum):
     DEEP_ARCHIVE = "DEEP_ARCHIVE"


-class OutputS3ServerSideEncryptionForUploadedObjects(str, Enum):
+class OutputS3ServerSideEncryptionForUploadedObjects(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     AES256 = "AES256"
     AWS_KMS = "aws:kms"


-class OutputS3DataFormat(str, Enum):
+class OutputS3DataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format of the output data"""

     JSON = "json"
@@ -65,28 +70,28 @@ class OutputS3DataFormat(str, Enum):
     PARQUET = "parquet"


-class OutputS3BackpressureBehavior(str, Enum):
+class OutputS3BackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputS3DiskSpaceProtection(str, Enum):
+class OutputS3DiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputS3Compression(str, Enum):
+class OutputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputS3CompressionLevel(str, Enum):
+class OutputS3CompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -94,7 +99,7 @@ class OutputS3CompressionLevel(str, Enum):
     BEST_COMPRESSION = "best_compression"


-class OutputS3ParquetVersion(str, Enum):
+class OutputS3ParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -102,7 +107,7 @@ class OutputS3ParquetVersion(str, Enum):
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputS3DataPageVersion(str, Enum):
+class OutputS3DataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -272,7 +277,10 @@ class OutputS3(BaseModel):
     r"""Secret key. This value can be a constant or a JavaScript expression. Example: `${C.env.SOME_SECRET}`)"""

     aws_authentication_method: Annotated[
-        Optional[OutputS3AuthenticationMethod],
+        Annotated[
+            Optional[OutputS3AuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputS3AuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -281,7 +289,11 @@ class OutputS3(BaseModel):
     r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

     signature_version: Annotated[
-        Optional[OutputS3SignatureVersion], pydantic.Field(alias="signatureVersion")
+        Annotated[
+            Optional[OutputS3SignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="signatureVersion"),
     ] = OutputS3SignatureVersion.V4
     r"""Signature version to use for signing S3 requests"""

@@ -329,17 +341,26 @@ class OutputS3(BaseModel):
     r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""

     object_acl: Annotated[
-        Optional[OutputS3ObjectACL], pydantic.Field(alias="objectACL")
+        Annotated[
+            Optional[OutputS3ObjectACL], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="objectACL"),
     ] = OutputS3ObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Optional[OutputS3StorageClass], pydantic.Field(alias="storageClass")
+        Annotated[
+            Optional[OutputS3StorageClass], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""

     server_side_encryption: Annotated[
-        Optional[OutputS3ServerSideEncryptionForUploadedObjects],
+        Annotated[
+            Optional[OutputS3ServerSideEncryptionForUploadedObjects],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="serverSideEncryption"),
     ] = None

@@ -356,9 +377,12 @@ class OutputS3(BaseModel):
     )
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

-    format_: Annotated[Optional[OutputS3DataFormat], pydantic.Field(alias="format")] = (
-        OutputS3DataFormat.JSON
-    )
+    format_: Annotated[
+        Annotated[
+            Optional[OutputS3DataFormat], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="format"),
+    ] = OutputS3DataFormat.JSON
     r"""Format of the output data"""

     base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
@@ -390,7 +414,11 @@ class OutputS3(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Optional[OutputS3BackpressureBehavior], pydantic.Field(alias="onBackpressure")
+        Annotated[
+            Optional[OutputS3BackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="onBackpressure"),
     ] = OutputS3BackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""

@@ -400,7 +428,10 @@ class OutputS3(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Optional[OutputS3DiskSpaceProtection],
+        Annotated[
+            Optional[OutputS3DiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputS3DiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
@@ -438,11 +469,17 @@ class OutputS3(BaseModel):
     aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
     r"""Select or create a stored secret that references your access key and secret key"""

-    compress: Optional[OutputS3Compression] = OutputS3Compression.GZIP
+    compress: Annotated[
+        Optional[OutputS3Compression], PlainValidator(validate_open_enum(False))
+    ] = OutputS3Compression.GZIP
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Optional[OutputS3CompressionLevel], pydantic.Field(alias="compressionLevel")
+        Annotated[
+            Optional[OutputS3CompressionLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="compressionLevel"),
     ] = OutputS3CompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""

@@ -452,12 +489,17 @@ class OutputS3(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Optional[OutputS3ParquetVersion], pydantic.Field(alias="parquetVersion")
+        Annotated[
+            Optional[OutputS3ParquetVersion], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="parquetVersion"),
     ] = OutputS3ParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Optional[OutputS3DataPageVersion],
+        Annotated[
+            Optional[OutputS3DataPageVersion], PlainValidator(validate_open_enum(False))
+        ],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputS3DataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
cribl_control_plane/models/outputsecuritylake.py +52 -18

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,7 +15,7 @@ class OutputSecurityLakeType(str, Enum):
     SECURITY_LAKE = "security_lake"


-class OutputSecurityLakeAuthenticationMethod(str, Enum):
+class OutputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""

     AUTO = "auto"
@@ -20,14 +23,14 @@ class OutputSecurityLakeAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class OutputSecurityLakeSignatureVersion(str, Enum):
+class OutputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing Amazon Security Lake requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputSecurityLakeObjectACL(str, Enum):
+class OutputSecurityLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -39,7 +42,7 @@ class OutputSecurityLakeObjectACL(str, Enum):
     BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


-class OutputSecurityLakeStorageClass(str, Enum):
+class OutputSecurityLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -52,26 +55,28 @@ class OutputSecurityLakeStorageClass(str, Enum):
     DEEP_ARCHIVE = "DEEP_ARCHIVE"


-class OutputSecurityLakeServerSideEncryptionForUploadedObjects(str, Enum):
+class OutputSecurityLakeServerSideEncryptionForUploadedObjects(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     AES256 = "AES256"
     AWS_KMS = "aws:kms"


-class OutputSecurityLakeBackpressureBehavior(str, Enum):
+class OutputSecurityLakeBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputSecurityLakeDiskSpaceProtection(str, Enum):
+class OutputSecurityLakeDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputSecurityLakeParquetVersion(str, Enum):
+class OutputSecurityLakeParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -79,7 +84,7 @@ class OutputSecurityLakeParquetVersion(str, Enum):
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputSecurityLakeDataPageVersion(str, Enum):
+class OutputSecurityLakeDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -252,7 +257,10 @@ class OutputSecurityLake(BaseModel):
     )

     aws_authentication_method: Annotated[
-        Optional[OutputSecurityLakeAuthenticationMethod],
+        Annotated[
+            Optional[OutputSecurityLakeAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputSecurityLakeAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -261,7 +269,10 @@ class OutputSecurityLake(BaseModel):
     r"""Amazon Security Lake service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Amazon Security Lake-compatible endpoint."""

     signature_version: Annotated[
-        Optional[OutputSecurityLakeSignatureVersion],
+        Annotated[
+            Optional[OutputSecurityLakeSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputSecurityLakeSignatureVersion.V4
     r"""Signature version to use for signing Amazon Security Lake requests"""
@@ -302,17 +313,28 @@ class OutputSecurityLake(BaseModel):
     r"""Add the Output ID value to staging location"""

     object_acl: Annotated[
-        Optional[OutputSecurityLakeObjectACL], pydantic.Field(alias="objectACL")
+        Annotated[
+            Optional[OutputSecurityLakeObjectACL],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="objectACL"),
     ] = OutputSecurityLakeObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Optional[OutputSecurityLakeStorageClass], pydantic.Field(alias="storageClass")
+        Annotated[
+            Optional[OutputSecurityLakeStorageClass],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""

     server_side_encryption: Annotated[
-        Optional[OutputSecurityLakeServerSideEncryptionForUploadedObjects],
+        Annotated[
+            Optional[OutputSecurityLakeServerSideEncryptionForUploadedObjects],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="serverSideEncryption"),
     ] = None

@@ -348,7 +370,10 @@ class OutputSecurityLake(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Optional[OutputSecurityLakeBackpressureBehavior],
+        Annotated[
+            Optional[OutputSecurityLakeBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputSecurityLakeBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -359,7 +384,10 @@ class OutputSecurityLake(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Optional[OutputSecurityLakeDiskSpaceProtection],
+        Annotated[
+            Optional[OutputSecurityLakeDiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputSecurityLakeDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
@@ -395,13 +423,19 @@ class OutputSecurityLake(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Optional[OutputSecurityLakeParquetVersion],
+        Annotated[
+            Optional[OutputSecurityLakeParquetVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="parquetVersion"),
     ] = OutputSecurityLakeParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Optional[OutputSecurityLakeDataPageVersion],
+        Annotated[
+            Optional[OutputSecurityLakeDataPageVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputSecurityLakeDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
cribl_control_plane/models/outputsentinel.py +45 -18

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -23,7 +26,7 @@ class OutputSentinelExtraHTTPHeader(BaseModel):
     name: Optional[str] = None


-class OutputSentinelFailedRequestLoggingMode(str, Enum):
+class OutputSentinelFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""

     PAYLOAD = "payload"
@@ -85,7 +88,7 @@ class OutputSentinelTimeoutRetrySettings(BaseModel):
     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""


-class OutputSentinelBackpressureBehavior(str, Enum):
+class OutputSentinelBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -93,39 +96,39 @@ class OutputSentinelBackpressureBehavior(str, Enum):
     QUEUE = "queue"


-class AuthType(str, Enum):
+class AuthType(str, Enum, metaclass=utils.OpenEnumMeta):
     OAUTH = "oauth"


-class EndpointConfiguration(str, Enum):
+class EndpointConfiguration(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Enter the data collection endpoint URL or the individual ID"""

     URL = "url"
     ID = "ID"


-class OutputSentinelFormat(str, Enum):
+class OutputSentinelFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     NDJSON = "ndjson"
     JSON_ARRAY = "json_array"
     CUSTOM = "custom"
     ADVANCED = "advanced"


-class OutputSentinelCompression(str, Enum):
+class OutputSentinelCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputSentinelQueueFullBehavior(str, Enum):
+class OutputSentinelQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputSentinelMode(str, Enum):
+class OutputSentinelMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -319,7 +322,10 @@ class OutputSentinel(BaseModel):
     r"""Enable round-robin DNS lookup. When a DNS server returns multiple addresses, @{product} will cycle through them in the order returned. For optimal performance, consider enabling this setting for non-load balanced destinations."""

     failed_request_logging_mode: Annotated[
-        Optional[OutputSentinelFailedRequestLoggingMode],
+        Annotated[
+            Optional[OutputSentinelFailedRequestLoggingMode],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="failedRequestLoggingMode"),
     ] = OutputSentinelFailedRequestLoggingMode.NONE
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
@@ -346,18 +352,26 @@ class OutputSentinel(BaseModel):
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""

     on_backpressure: Annotated[
-        Optional[OutputSentinelBackpressureBehavior],
+        Annotated[
+            Optional[OutputSentinelBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputSentinelBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""

-    auth_type: Annotated[Optional[AuthType], pydantic.Field(alias="authType")] = None
+    auth_type: Annotated[
+        Annotated[Optional[AuthType], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="authType"),
+    ] = None

     scope: Optional[str] = "https://monitor.azure.com/.default"
     r"""Scope to pass in the OAuth request"""

     endpoint_url_configuration: Annotated[
-        Optional[EndpointConfiguration],
+        Annotated[
+            Optional[EndpointConfiguration], PlainValidator(validate_open_enum(False))
+        ],
         pydantic.Field(alias="endpointURLConfiguration"),
     ] = EndpointConfiguration.URL
     r"""Enter the data collection endpoint URL or the individual ID"""
@@ -370,7 +384,10 @@ class OutputSentinel(BaseModel):
     description: Optional[str] = None

     format_: Annotated[
-        Optional[OutputSentinelFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputSentinelFormat], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="format"),
     ] = None

     custom_source_expression: Annotated[
@@ -427,19 +444,29 @@ class OutputSentinel(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Optional[OutputSentinelCompression], pydantic.Field(alias="pqCompress")
+        Annotated[
+            Optional[OutputSentinelCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqCompress"),
     ] = OutputSentinelCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Optional[OutputSentinelQueueFullBehavior],
+        Annotated[
+            Optional[OutputSentinelQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputSentinelQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

-    pq_mode: Annotated[Optional[OutputSentinelMode], pydantic.Field(alias="pqMode")] = (
-        OutputSentinelMode.ERROR
-    )
+    pq_mode: Annotated[
+        Annotated[
+            Optional[OutputSentinelMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputSentinelMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     pq_controls: Annotated[
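
For SDK consumers, the visible consequence of this prerelease is that a field annotated with an enum type may now hold a plain string at runtime whenever the API returns a value the SDK does not declare. A hedged sketch of defensive handling, using names from the outputs3.py diff above:

# Sketch only: assumes undeclared values arrive as raw strings, per the
# OpenEnumMeta pattern; verify against the released 0.1.0a1 behavior.
from cribl_control_plane.models.outputs3 import OutputS3, OutputS3StorageClass


def describe_storage_class(out: OutputS3) -> str:
    value = out.storage_class  # defaults to None per the diff
    if value is None:
        return "default (not set)"
    if isinstance(value, OutputS3StorageClass):
        return f"known storage class: {value.value}"
    return f"unrecognized storage class (newer API?): {value!r}"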