cribl-control-plane 0.0.44-py3-none-any.whl → 0.0.44a2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.
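Note that 0.0.44a2 is a PEP 440 pre-release identifier: although it was published after 0.0.44, it sorts before it, and installers skip it unless pre-releases are explicitly requested (for example with pip's --pre flag). A quick way to confirm this with the packaging library (an illustrative snippet, not part of the package diff):

    from packaging.version import Version

    old, new = Version("0.0.44"), Version("0.0.44a2")
    print(new.is_prerelease)  # True: "a2" marks an alpha pre-release
    print(new < old)          # True: the pre-release sorts before the final 0.0.44 release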

Files changed (158)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +8 -2
  3. cribl_control_plane/models/__init__.py +3 -3
  4. cribl_control_plane/models/appmode.py +2 -1
  5. cribl_control_plane/models/cacheconnection.py +10 -2
  6. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  7. cribl_control_plane/models/cloudprovider.py +2 -1
  8. cribl_control_plane/models/configgroup.py +7 -2
  9. cribl_control_plane/models/configgroupcloud.py +6 -2
  10. cribl_control_plane/models/createconfiggroupbyproductop.py +8 -2
  11. cribl_control_plane/models/cribllakedataset.py +8 -2
  12. cribl_control_plane/models/datasetmetadata.py +8 -2
  13. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +7 -2
  14. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +4 -2
  15. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +4 -2
  16. cribl_control_plane/models/getconfiggroupbyproductandidop.py +3 -1
  17. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +7 -2
  18. cribl_control_plane/models/getsummaryop.py +7 -2
  19. cribl_control_plane/models/hbcriblinfo.py +6 -1
  20. cribl_control_plane/models/healthstatus.py +7 -4
  21. cribl_control_plane/models/inputappscope.py +34 -14
  22. cribl_control_plane/models/inputazureblob.py +17 -6
  23. cribl_control_plane/models/inputcollection.py +11 -4
  24. cribl_control_plane/models/inputconfluentcloud.py +47 -20
  25. cribl_control_plane/models/inputcribl.py +11 -4
  26. cribl_control_plane/models/inputcriblhttp.py +23 -8
  27. cribl_control_plane/models/inputcribllakehttp.py +22 -10
  28. cribl_control_plane/models/inputcriblmetrics.py +12 -4
  29. cribl_control_plane/models/inputcribltcp.py +23 -8
  30. cribl_control_plane/models/inputcrowdstrike.py +26 -10
  31. cribl_control_plane/models/inputdatadogagent.py +24 -8
  32. cribl_control_plane/models/inputdatagen.py +11 -4
  33. cribl_control_plane/models/inputedgeprometheus.py +58 -24
  34. cribl_control_plane/models/inputelastic.py +40 -14
  35. cribl_control_plane/models/inputeventhub.py +15 -6
  36. cribl_control_plane/models/inputexec.py +14 -6
  37. cribl_control_plane/models/inputfile.py +15 -6
  38. cribl_control_plane/models/inputfirehose.py +23 -8
  39. cribl_control_plane/models/inputgooglepubsub.py +19 -6
  40. cribl_control_plane/models/inputgrafana.py +67 -24
  41. cribl_control_plane/models/inputhttp.py +23 -8
  42. cribl_control_plane/models/inputhttpraw.py +23 -8
  43. cribl_control_plane/models/inputjournalfiles.py +12 -4
  44. cribl_control_plane/models/inputkafka.py +46 -16
  45. cribl_control_plane/models/inputkinesis.py +38 -14
  46. cribl_control_plane/models/inputkubeevents.py +11 -4
  47. cribl_control_plane/models/inputkubelogs.py +16 -8
  48. cribl_control_plane/models/inputkubemetrics.py +16 -8
  49. cribl_control_plane/models/inputloki.py +29 -10
  50. cribl_control_plane/models/inputmetrics.py +23 -8
  51. cribl_control_plane/models/inputmodeldriventelemetry.py +27 -10
  52. cribl_control_plane/models/inputmsk.py +53 -18
  53. cribl_control_plane/models/inputnetflow.py +11 -4
  54. cribl_control_plane/models/inputoffice365mgmt.py +33 -14
  55. cribl_control_plane/models/inputoffice365msgtrace.py +35 -16
  56. cribl_control_plane/models/inputoffice365service.py +35 -16
  57. cribl_control_plane/models/inputopentelemetry.py +38 -16
  58. cribl_control_plane/models/inputprometheus.py +50 -18
  59. cribl_control_plane/models/inputprometheusrw.py +30 -10
  60. cribl_control_plane/models/inputrawudp.py +11 -4
  61. cribl_control_plane/models/inputs3.py +21 -8
  62. cribl_control_plane/models/inputs3inventory.py +26 -10
  63. cribl_control_plane/models/inputsecuritylake.py +27 -10
  64. cribl_control_plane/models/inputsnmp.py +16 -6
  65. cribl_control_plane/models/inputsplunk.py +33 -12
  66. cribl_control_plane/models/inputsplunkhec.py +29 -10
  67. cribl_control_plane/models/inputsplunksearch.py +33 -14
  68. cribl_control_plane/models/inputsqs.py +27 -10
  69. cribl_control_plane/models/inputsyslog.py +43 -16
  70. cribl_control_plane/models/inputsystemmetrics.py +48 -24
  71. cribl_control_plane/models/inputsystemstate.py +16 -8
  72. cribl_control_plane/models/inputtcp.py +29 -10
  73. cribl_control_plane/models/inputtcpjson.py +29 -10
  74. cribl_control_plane/models/inputwef.py +37 -14
  75. cribl_control_plane/models/inputwindowsmetrics.py +44 -24
  76. cribl_control_plane/models/inputwineventlogs.py +20 -10
  77. cribl_control_plane/models/inputwiz.py +21 -8
  78. cribl_control_plane/models/inputwizwebhook.py +23 -8
  79. cribl_control_plane/models/inputzscalerhec.py +29 -10
  80. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  81. cribl_control_plane/models/listconfiggroupbyproductop.py +3 -1
  82. cribl_control_plane/models/masterworkerentry.py +7 -2
  83. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  84. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  85. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  86. cribl_control_plane/models/nodeupgradestate.py +2 -1
  87. cribl_control_plane/models/nodeupgradestatus.py +13 -5
  88. cribl_control_plane/models/outputazureblob.py +48 -18
  89. cribl_control_plane/models/outputazuredataexplorer.py +74 -29
  90. cribl_control_plane/models/outputazureeventhub.py +40 -18
  91. cribl_control_plane/models/outputazurelogs.py +36 -13
  92. cribl_control_plane/models/outputclickhouse.py +56 -21
  93. cribl_control_plane/models/outputcloudwatch.py +29 -10
  94. cribl_control_plane/models/outputconfluentcloud.py +77 -32
  95. cribl_control_plane/models/outputcriblhttp.py +46 -18
  96. cribl_control_plane/models/outputcribllake.py +46 -16
  97. cribl_control_plane/models/outputcribltcp.py +45 -18
  98. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +50 -15
  99. cribl_control_plane/models/outputdatadog.py +48 -20
  100. cribl_control_plane/models/outputdataset.py +46 -18
  101. cribl_control_plane/models/outputdiskspool.py +7 -2
  102. cribl_control_plane/models/outputdls3.py +68 -24
  103. cribl_control_plane/models/outputdynatracehttp.py +54 -21
  104. cribl_control_plane/models/outputdynatraceotlp.py +56 -23
  105. cribl_control_plane/models/outputelastic.py +44 -19
  106. cribl_control_plane/models/outputelasticcloud.py +37 -13
  107. cribl_control_plane/models/outputexabeam.py +29 -10
  108. cribl_control_plane/models/outputfilesystem.py +39 -14
  109. cribl_control_plane/models/outputgooglechronicle.py +50 -16
  110. cribl_control_plane/models/outputgooglecloudlogging.py +41 -14
  111. cribl_control_plane/models/outputgooglecloudstorage.py +66 -24
  112. cribl_control_plane/models/outputgooglepubsub.py +31 -10
  113. cribl_control_plane/models/outputgrafanacloud.py +99 -34
  114. cribl_control_plane/models/outputgraphite.py +31 -14
  115. cribl_control_plane/models/outputhoneycomb.py +36 -13
  116. cribl_control_plane/models/outputhumiohec.py +44 -17
  117. cribl_control_plane/models/outputinfluxdb.py +43 -17
  118. cribl_control_plane/models/outputkafka.py +74 -28
  119. cribl_control_plane/models/outputkinesis.py +40 -16
  120. cribl_control_plane/models/outputloki.py +41 -16
  121. cribl_control_plane/models/outputminio.py +65 -24
  122. cribl_control_plane/models/outputmsk.py +82 -30
  123. cribl_control_plane/models/outputnewrelic.py +43 -18
  124. cribl_control_plane/models/outputnewrelicevents.py +42 -15
  125. cribl_control_plane/models/outputopentelemetry.py +68 -27
  126. cribl_control_plane/models/outputprometheus.py +36 -13
  127. cribl_control_plane/models/outputring.py +19 -8
  128. cribl_control_plane/models/outputs3.py +68 -26
  129. cribl_control_plane/models/outputsecuritylake.py +52 -18
  130. cribl_control_plane/models/outputsentinel.py +45 -18
  131. cribl_control_plane/models/outputsentineloneaisiem.py +51 -19
  132. cribl_control_plane/models/outputservicenow.py +61 -25
  133. cribl_control_plane/models/outputsignalfx.py +38 -15
  134. cribl_control_plane/models/outputsns.py +36 -14
  135. cribl_control_plane/models/outputsplunk.py +60 -24
  136. cribl_control_plane/models/outputsplunkhec.py +36 -13
  137. cribl_control_plane/models/outputsplunklb.py +77 -30
  138. cribl_control_plane/models/outputsqs.py +41 -16
  139. cribl_control_plane/models/outputstatsd.py +30 -14
  140. cribl_control_plane/models/outputstatsdext.py +29 -12
  141. cribl_control_plane/models/outputsumologic.py +35 -12
  142. cribl_control_plane/models/outputsyslog.py +58 -24
  143. cribl_control_plane/models/outputtcpjson.py +52 -20
  144. cribl_control_plane/models/outputwavefront.py +36 -13
  145. cribl_control_plane/models/outputwebhook.py +58 -22
  146. cribl_control_plane/models/outputxsiam.py +36 -15
  147. cribl_control_plane/models/productscore.py +2 -1
  148. cribl_control_plane/models/rbacresource.py +2 -1
  149. cribl_control_plane/models/resourcepolicy.py +4 -2
  150. cribl_control_plane/models/runnablejobcollection.py +30 -13
  151. cribl_control_plane/models/runnablejobexecutor.py +13 -4
  152. cribl_control_plane/models/runnablejobscheduledsearch.py +7 -2
  153. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +8 -2
  154. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +8 -2
  155. cribl_control_plane/models/workertypes.py +2 -1
  156. {cribl_control_plane-0.0.44.dist-info → cribl_control_plane-0.0.44a2.dist-info}/METADATA +1 -1
  157. {cribl_control_plane-0.0.44.dist-info → cribl_control_plane-0.0.44a2.dist-info}/RECORD +158 -158
  158. {cribl_control_plane-0.0.44.dist-info → cribl_control_plane-0.0.44a2.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputgooglecloudlogging.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,14 +15,14 @@ class OutputGoogleCloudLoggingType(str, Enum):
     GOOGLE_CLOUD_LOGGING = "google_cloud_logging"


-class LogLocationType(str, Enum):
+class LogLocationType(str, Enum, metaclass=utils.OpenEnumMeta):
     PROJECT = "project"
     ORGANIZATION = "organization"
     BILLING_ACCOUNT = "billingAccount"
     FOLDER = "folder"


-class PayloadFormat(str, Enum):
+class PayloadFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use when sending payload. Defaults to Text."""

     TEXT = "text"
@@ -56,7 +59,9 @@ class ResourceTypeLabel(BaseModel):
     r"""JavaScript expression to compute the label's value."""


-class OutputGoogleCloudLoggingGoogleAuthenticationMethod(str, Enum):
+class OutputGoogleCloudLoggingGoogleAuthenticationMethod(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -64,7 +69,9 @@ class OutputGoogleCloudLoggingGoogleAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class OutputGoogleCloudLoggingBackpressureBehavior(str, Enum):
+class OutputGoogleCloudLoggingBackpressureBehavior(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -72,21 +79,23 @@ class OutputGoogleCloudLoggingBackpressureBehavior(str, Enum):
     QUEUE = "queue"


-class OutputGoogleCloudLoggingCompression(str, Enum):
+class OutputGoogleCloudLoggingCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudLoggingQueueFullBehavior(str, Enum):
+class OutputGoogleCloudLoggingQueueFullBehavior(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudLoggingMode(str, Enum):
+class OutputGoogleCloudLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -233,7 +242,8 @@ class OutputGoogleCloudLogging(BaseModel):
     type: OutputGoogleCloudLoggingType

     log_location_type: Annotated[
-        LogLocationType, pydantic.Field(alias="logLocationType")
+        Annotated[LogLocationType, PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="logLocationType"),
     ]

     log_name_expression: Annotated[str, pydantic.Field(alias="logNameExpression")]
@@ -262,7 +272,8 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""Tags for filtering and grouping in @{product}"""

     payload_format: Annotated[
-        Optional[PayloadFormat], pydantic.Field(alias="payloadFormat")
+        Annotated[Optional[PayloadFormat], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="payloadFormat"),
     ] = PayloadFormat.TEXT
     r"""Format to use when sending payload. Defaults to Text."""

@@ -292,7 +303,10 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""JavaScript expression to compute the value of the insert ID field."""

     google_auth_method: Annotated[
-        Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
+        Annotated[
+            Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGoogleCloudLoggingGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -475,7 +489,10 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""A JavaScript expression that evaluates to the the sampling decision of the span associated with the log entry. See the [documentation](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) for details."""

     on_backpressure: Annotated[
-        Optional[OutputGoogleCloudLoggingBackpressureBehavior],
+        Annotated[
+            Optional[OutputGoogleCloudLoggingBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudLoggingBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -506,19 +523,29 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Optional[OutputGoogleCloudLoggingCompression],
+        Annotated[
+            Optional[OutputGoogleCloudLoggingCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqCompress"),
     ] = OutputGoogleCloudLoggingCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Optional[OutputGoogleCloudLoggingQueueFullBehavior],
+        Annotated[
+            Optional[OutputGoogleCloudLoggingQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGoogleCloudLoggingQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Optional[OutputGoogleCloudLoggingMode], pydantic.Field(alias="pqMode")
+        Annotated[
+            Optional[OutputGoogleCloudLoggingMode],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqMode"),
     ] = OutputGoogleCloudLoggingMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

cribl_control_plane/models/outputgooglecloudstorage.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,20 +15,22 @@ class OutputGoogleCloudStorageType(str, Enum):
     GOOGLE_CLOUD_STORAGE = "google_cloud_storage"


-class OutputGoogleCloudStorageSignatureVersion(str, Enum):
+class OutputGoogleCloudStorageSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing Google Cloud Storage requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputGoogleCloudStorageAuthenticationMethod(str, Enum):
+class OutputGoogleCloudStorageAuthenticationMethod(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     AUTO = "auto"
     MANUAL = "manual"
     SECRET = "secret"


-class OutputGoogleCloudStorageObjectACL(str, Enum):
+class OutputGoogleCloudStorageObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -36,7 +41,7 @@ class OutputGoogleCloudStorageObjectACL(str, Enum):
     PUBLIC_READ = "public-read"


-class OutputGoogleCloudStorageStorageClass(str, Enum):
+class OutputGoogleCloudStorageStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -45,7 +50,7 @@ class OutputGoogleCloudStorageStorageClass(str, Enum):
     ARCHIVE = "ARCHIVE"


-class OutputGoogleCloudStorageDataFormat(str, Enum):
+class OutputGoogleCloudStorageDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format of the output data"""

     JSON = "json"
@@ -53,28 +58,32 @@ class OutputGoogleCloudStorageDataFormat(str, Enum):
     PARQUET = "parquet"


-class OutputGoogleCloudStorageBackpressureBehavior(str, Enum):
+class OutputGoogleCloudStorageBackpressureBehavior(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageDiskSpaceProtection(str, Enum):
+class OutputGoogleCloudStorageDiskSpaceProtection(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageCompression(str, Enum):
+class OutputGoogleCloudStorageCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudStorageCompressionLevel(str, Enum):
+class OutputGoogleCloudStorageCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -82,7 +91,7 @@ class OutputGoogleCloudStorageCompressionLevel(str, Enum):
     BEST_COMPRESSION = "best_compression"


-class OutputGoogleCloudStorageParquetVersion(str, Enum):
+class OutputGoogleCloudStorageParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -90,7 +99,7 @@ class OutputGoogleCloudStorageParquetVersion(str, Enum):
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputGoogleCloudStorageDataPageVersion(str, Enum):
+class OutputGoogleCloudStorageDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -244,13 +253,19 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Google Cloud Storage service endpoint"""

     signature_version: Annotated[
-        Optional[OutputGoogleCloudStorageSignatureVersion],
+        Annotated[
+            Optional[OutputGoogleCloudStorageSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputGoogleCloudStorageSignatureVersion.V4
     r"""Signature version to use for signing Google Cloud Storage requests"""

     aws_authentication_method: Annotated[
-        Optional[OutputGoogleCloudStorageAuthenticationMethod],
+        Annotated[
+            Optional[OutputGoogleCloudStorageAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputGoogleCloudStorageAuthenticationMethod.MANUAL

@@ -268,12 +283,19 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Disable if you can access files within the bucket but not the bucket itself"""

     object_acl: Annotated[
-        Optional[OutputGoogleCloudStorageObjectACL], pydantic.Field(alias="objectACL")
+        Annotated[
+            Optional[OutputGoogleCloudStorageObjectACL],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="objectACL"),
     ] = OutputGoogleCloudStorageObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Optional[OutputGoogleCloudStorageStorageClass],
+        Annotated[
+            Optional[OutputGoogleCloudStorageStorageClass],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""
@@ -304,7 +326,11 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

     format_: Annotated[
-        Optional[OutputGoogleCloudStorageDataFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputGoogleCloudStorageDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="format"),
     ] = OutputGoogleCloudStorageDataFormat.JSON
     r"""Format of the output data"""

@@ -347,7 +373,10 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Optional[OutputGoogleCloudStorageBackpressureBehavior],
+        Annotated[
+            Optional[OutputGoogleCloudStorageBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudStorageBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -358,20 +387,27 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Optional[OutputGoogleCloudStorageDiskSpaceProtection],
+        Annotated[
+            Optional[OutputGoogleCloudStorageDiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputGoogleCloudStorageDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     description: Optional[str] = None

-    compress: Optional[OutputGoogleCloudStorageCompression] = (
-        OutputGoogleCloudStorageCompression.GZIP
-    )
+    compress: Annotated[
+        Optional[OutputGoogleCloudStorageCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = OutputGoogleCloudStorageCompression.GZIP
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Optional[OutputGoogleCloudStorageCompressionLevel],
+        Annotated[
+            Optional[OutputGoogleCloudStorageCompressionLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="compressionLevel"),
     ] = OutputGoogleCloudStorageCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""
@@ -382,13 +418,19 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Optional[OutputGoogleCloudStorageParquetVersion],
+        Annotated[
+            Optional[OutputGoogleCloudStorageParquetVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="parquetVersion"),
     ] = OutputGoogleCloudStorageParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Optional[OutputGoogleCloudStorageDataPageVersion],
+        Annotated[
+            Optional[OutputGoogleCloudStorageDataPageVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputGoogleCloudStorageDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
cribl_control_plane/models/outputgooglepubsub.py

@@ -1,9 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -12,7 +15,9 @@ class OutputGooglePubsubType(str, Enum):
     GOOGLE_PUBSUB = "google_pubsub"


-class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
+class OutputGooglePubsubGoogleAuthenticationMethod(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -20,7 +25,7 @@ class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
     SECRET = "secret"


-class OutputGooglePubsubBackpressureBehavior(str, Enum):
+class OutputGooglePubsubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -28,21 +33,21 @@ class OutputGooglePubsubBackpressureBehavior(str, Enum):
     QUEUE = "queue"


-class OutputGooglePubsubCompression(str, Enum):
+class OutputGooglePubsubCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGooglePubsubQueueFullBehavior(str, Enum):
+class OutputGooglePubsubQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGooglePubsubMode(str, Enum):
+class OutputGooglePubsubMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -149,7 +154,10 @@ class OutputGooglePubsub(BaseModel):
     r"""Region to publish messages to. Select 'default' to allow Google to auto-select the nearest region. When using ordered delivery, the selected region must be allowed by message storage policy."""

     google_auth_method: Annotated[
-        Optional[OutputGooglePubsubGoogleAuthenticationMethod],
+        Annotated[
+            Optional[OutputGooglePubsubGoogleAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGooglePubsubGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -189,7 +197,10 @@ class OutputGooglePubsub(BaseModel):
     r"""The maximum number of in-progress API requests before backpressure is applied."""

     on_backpressure: Annotated[
-        Optional[OutputGooglePubsubBackpressureBehavior],
+        Annotated[
+            Optional[OutputGooglePubsubBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGooglePubsubBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -210,18 +221,28 @@ class OutputGooglePubsub(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Optional[OutputGooglePubsubCompression], pydantic.Field(alias="pqCompress")
+        Annotated[
+            Optional[OutputGooglePubsubCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqCompress"),
     ] = OutputGooglePubsubCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Optional[OutputGooglePubsubQueueFullBehavior],
+        Annotated[
+            Optional[OutputGooglePubsubQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGooglePubsubQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Optional[OutputGooglePubsubMode], pydantic.Field(alias="pqMode")
+        Annotated[
+            Optional[OutputGooglePubsubMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
     ] = OutputGooglePubsubMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

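The same change repeats across all three diffs above and, judging by the file list, across most input* and output* models in this release: closed Enum classes become open enums via metaclass=utils.OpenEnumMeta, and each model field carrying one of these enums gains an inner Annotated[..., PlainValidator(validate_open_enum(False))]. The implementations of OpenEnumMeta and validate_open_enum are not shown in this diff, but the naming and the PlainValidator wiring suggest that enum values outside the declared members are now accepted and passed through rather than rejected at validation time. A minimal standalone sketch of that idea in plain pydantic follows; OpenCompression and validate_open are illustrative names rather than the SDK's helpers, and the boolean argument to validate_open_enum is not modeled here.

    from enum import Enum
    from typing import Union

    from pydantic import BaseModel
    from pydantic.functional_validators import PlainValidator
    from typing_extensions import Annotated


    class OpenCompression(str, Enum):
        # Stand-in for a generated enum such as OutputGooglePubsubCompression
        NONE = "none"
        GZIP = "gzip"


    def validate_open(value: object) -> Union[OpenCompression, str]:
        # Map known values to the enum member; let unknown values through as
        # plain strings instead of raising, which is what an "open" enum implies.
        try:
            return OpenCompression(value)
        except ValueError:
            return str(value)


    class ExampleOutput(BaseModel):
        compress: Annotated[
            Union[OpenCompression, str], PlainValidator(validate_open)
        ] = OpenCompression.GZIP


    print(ExampleOutput(compress="gzip").compress)  # -> OpenCompression.GZIP
    print(ExampleOutput(compress="zstd").compress)  # -> 'zstd', not a ValidationError

With a closed enum, the second construction would fail validation; with the open-enum pattern, values introduced server-side in a newer Cribl release can still deserialize in an older SDK.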