cribl-control-plane: 0.0.48a1 → 0.0.50 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (165):
  1. cribl_control_plane/_version.py +6 -4
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/httpclient.py +0 -1
  4. cribl_control_plane/models/__init__.py +12 -12
  5. cribl_control_plane/models/appmode.py +13 -0
  6. cribl_control_plane/models/cacheconnection.py +2 -10
  7. cribl_control_plane/models/cacheconnectionbackfillstatus.py +1 -2
  8. cribl_control_plane/models/cloudprovider.py +1 -2
  9. cribl_control_plane/models/configgroup.py +2 -7
  10. cribl_control_plane/models/configgroupcloud.py +2 -6
  11. cribl_control_plane/models/createconfiggroupbyproductop.py +2 -8
  12. cribl_control_plane/models/cribllakedataset.py +2 -8
  13. cribl_control_plane/models/datasetmetadata.py +2 -8
  14. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +2 -7
  15. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +2 -4
  16. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +2 -4
  17. cribl_control_plane/models/getconfiggroupbyproductandidop.py +1 -3
  18. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +2 -7
  19. cribl_control_plane/models/getsummaryop.py +2 -7
  20. cribl_control_plane/models/hbcriblinfo.py +3 -19
  21. cribl_control_plane/models/healthstatus.py +4 -7
  22. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  23. cribl_control_plane/models/inputappscope.py +14 -34
  24. cribl_control_plane/models/inputazureblob.py +6 -17
  25. cribl_control_plane/models/inputcollection.py +4 -11
  26. cribl_control_plane/models/inputconfluentcloud.py +20 -47
  27. cribl_control_plane/models/inputcribl.py +4 -11
  28. cribl_control_plane/models/inputcriblhttp.py +8 -23
  29. cribl_control_plane/models/inputcribllakehttp.py +10 -22
  30. cribl_control_plane/models/inputcriblmetrics.py +4 -12
  31. cribl_control_plane/models/inputcribltcp.py +8 -23
  32. cribl_control_plane/models/inputcrowdstrike.py +10 -26
  33. cribl_control_plane/models/inputdatadogagent.py +8 -24
  34. cribl_control_plane/models/inputdatagen.py +4 -11
  35. cribl_control_plane/models/inputedgeprometheus.py +24 -58
  36. cribl_control_plane/models/inputelastic.py +14 -40
  37. cribl_control_plane/models/inputeventhub.py +6 -15
  38. cribl_control_plane/models/inputexec.py +6 -14
  39. cribl_control_plane/models/inputfile.py +6 -15
  40. cribl_control_plane/models/inputfirehose.py +8 -23
  41. cribl_control_plane/models/inputgooglepubsub.py +6 -19
  42. cribl_control_plane/models/inputgrafana.py +24 -67
  43. cribl_control_plane/models/inputhttp.py +8 -23
  44. cribl_control_plane/models/inputhttpraw.py +8 -23
  45. cribl_control_plane/models/inputjournalfiles.py +4 -12
  46. cribl_control_plane/models/inputkafka.py +16 -46
  47. cribl_control_plane/models/inputkinesis.py +14 -38
  48. cribl_control_plane/models/inputkubeevents.py +4 -11
  49. cribl_control_plane/models/inputkubelogs.py +8 -16
  50. cribl_control_plane/models/inputkubemetrics.py +8 -16
  51. cribl_control_plane/models/inputloki.py +10 -29
  52. cribl_control_plane/models/inputmetrics.py +8 -23
  53. cribl_control_plane/models/inputmodeldriventelemetry.py +10 -32
  54. cribl_control_plane/models/inputmsk.py +18 -53
  55. cribl_control_plane/models/inputnetflow.py +4 -11
  56. cribl_control_plane/models/inputoffice365mgmt.py +14 -33
  57. cribl_control_plane/models/inputoffice365msgtrace.py +16 -35
  58. cribl_control_plane/models/inputoffice365service.py +16 -35
  59. cribl_control_plane/models/inputopentelemetry.py +16 -38
  60. cribl_control_plane/models/inputprometheus.py +18 -50
  61. cribl_control_plane/models/inputprometheusrw.py +10 -30
  62. cribl_control_plane/models/inputrawudp.py +4 -11
  63. cribl_control_plane/models/inputs3.py +8 -21
  64. cribl_control_plane/models/inputs3inventory.py +10 -26
  65. cribl_control_plane/models/inputsecuritylake.py +10 -27
  66. cribl_control_plane/models/inputsnmp.py +6 -16
  67. cribl_control_plane/models/inputsplunk.py +12 -33
  68. cribl_control_plane/models/inputsplunkhec.py +10 -29
  69. cribl_control_plane/models/inputsplunksearch.py +14 -33
  70. cribl_control_plane/models/inputsqs.py +10 -27
  71. cribl_control_plane/models/inputsyslog.py +16 -43
  72. cribl_control_plane/models/inputsystemmetrics.py +24 -48
  73. cribl_control_plane/models/inputsystemstate.py +8 -16
  74. cribl_control_plane/models/inputtcp.py +10 -29
  75. cribl_control_plane/models/inputtcpjson.py +10 -29
  76. cribl_control_plane/models/inputwef.py +14 -37
  77. cribl_control_plane/models/inputwindowsmetrics.py +24 -44
  78. cribl_control_plane/models/inputwineventlogs.py +10 -20
  79. cribl_control_plane/models/inputwiz.py +8 -21
  80. cribl_control_plane/models/inputwizwebhook.py +8 -23
  81. cribl_control_plane/models/inputzscalerhec.py +10 -29
  82. cribl_control_plane/models/lakehouseconnectiontype.py +1 -2
  83. cribl_control_plane/models/listconfiggroupbyproductop.py +1 -3
  84. cribl_control_plane/models/masterworkerentry.py +2 -7
  85. cribl_control_plane/models/nodeactiveupgradestatus.py +1 -2
  86. cribl_control_plane/models/nodefailedupgradestatus.py +1 -2
  87. cribl_control_plane/models/nodeprovidedinfo.py +0 -3
  88. cribl_control_plane/models/nodeskippedupgradestatus.py +1 -2
  89. cribl_control_plane/models/nodeupgradestate.py +1 -2
  90. cribl_control_plane/models/nodeupgradestatus.py +5 -13
  91. cribl_control_plane/models/outputazureblob.py +18 -48
  92. cribl_control_plane/models/outputazuredataexplorer.py +28 -73
  93. cribl_control_plane/models/outputazureeventhub.py +18 -40
  94. cribl_control_plane/models/outputazurelogs.py +12 -35
  95. cribl_control_plane/models/outputclickhouse.py +20 -55
  96. cribl_control_plane/models/outputcloudwatch.py +10 -29
  97. cribl_control_plane/models/outputconfluentcloud.py +32 -77
  98. cribl_control_plane/models/outputcriblhttp.py +16 -44
  99. cribl_control_plane/models/outputcribllake.py +16 -46
  100. cribl_control_plane/models/outputcribltcp.py +18 -45
  101. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +14 -49
  102. cribl_control_plane/models/outputdatadog.py +20 -48
  103. cribl_control_plane/models/outputdataset.py +18 -46
  104. cribl_control_plane/models/outputdiskspool.py +2 -7
  105. cribl_control_plane/models/outputdls3.py +24 -68
  106. cribl_control_plane/models/outputdynatracehttp.py +20 -53
  107. cribl_control_plane/models/outputdynatraceotlp.py +22 -55
  108. cribl_control_plane/models/outputelastic.py +18 -43
  109. cribl_control_plane/models/outputelasticcloud.py +12 -36
  110. cribl_control_plane/models/outputexabeam.py +10 -29
  111. cribl_control_plane/models/outputfilesystem.py +14 -39
  112. cribl_control_plane/models/outputgooglechronicle.py +16 -50
  113. cribl_control_plane/models/outputgooglecloudlogging.py +14 -41
  114. cribl_control_plane/models/outputgooglecloudstorage.py +24 -66
  115. cribl_control_plane/models/outputgooglepubsub.py +10 -31
  116. cribl_control_plane/models/outputgrafanacloud.py +32 -97
  117. cribl_control_plane/models/outputgraphite.py +14 -31
  118. cribl_control_plane/models/outputhoneycomb.py +12 -35
  119. cribl_control_plane/models/outputhumiohec.py +16 -43
  120. cribl_control_plane/models/outputinfluxdb.py +16 -42
  121. cribl_control_plane/models/outputkafka.py +28 -74
  122. cribl_control_plane/models/outputkinesis.py +16 -40
  123. cribl_control_plane/models/outputloki.py +16 -41
  124. cribl_control_plane/models/outputminio.py +24 -65
  125. cribl_control_plane/models/outputmsk.py +30 -82
  126. cribl_control_plane/models/outputnewrelic.py +18 -43
  127. cribl_control_plane/models/outputnewrelicevents.py +14 -41
  128. cribl_control_plane/models/outputopentelemetry.py +26 -67
  129. cribl_control_plane/models/outputprometheus.py +12 -35
  130. cribl_control_plane/models/outputring.py +8 -19
  131. cribl_control_plane/models/outputs3.py +26 -68
  132. cribl_control_plane/models/outputsecuritylake.py +18 -52
  133. cribl_control_plane/models/outputsentinel.py +18 -45
  134. cribl_control_plane/models/outputsentineloneaisiem.py +18 -50
  135. cribl_control_plane/models/outputservicenow.py +24 -60
  136. cribl_control_plane/models/outputsignalfx.py +14 -37
  137. cribl_control_plane/models/outputsns.py +14 -36
  138. cribl_control_plane/models/outputsplunk.py +24 -60
  139. cribl_control_plane/models/outputsplunkhec.py +12 -35
  140. cribl_control_plane/models/outputsplunklb.py +30 -77
  141. cribl_control_plane/models/outputsqs.py +16 -41
  142. cribl_control_plane/models/outputstatsd.py +14 -30
  143. cribl_control_plane/models/outputstatsdext.py +12 -29
  144. cribl_control_plane/models/outputsumologic.py +12 -35
  145. cribl_control_plane/models/outputsyslog.py +24 -58
  146. cribl_control_plane/models/outputtcpjson.py +20 -52
  147. cribl_control_plane/models/outputwavefront.py +12 -35
  148. cribl_control_plane/models/outputwebhook.py +22 -58
  149. cribl_control_plane/models/outputxsiam.py +14 -35
  150. cribl_control_plane/models/productscore.py +1 -2
  151. cribl_control_plane/models/rbacresource.py +1 -2
  152. cribl_control_plane/models/resourcepolicy.py +2 -4
  153. cribl_control_plane/models/routecloneconf.py +13 -0
  154. cribl_control_plane/models/routeconf.py +4 -3
  155. cribl_control_plane/models/runnablejobcollection.py +13 -30
  156. cribl_control_plane/models/runnablejobexecutor.py +4 -13
  157. cribl_control_plane/models/runnablejobscheduledsearch.py +2 -7
  158. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +2 -8
  159. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +2 -8
  160. cribl_control_plane/models/workertypes.py +1 -2
  161. cribl_control_plane/sdk.py +2 -2
  162. cribl_control_plane/utils/annotations.py +32 -8
  163. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/METADATA +2 -1
  164. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/RECORD +165 -163
  165. {cribl_control_plane-0.0.48a1.dist-info → cribl_control_plane-0.0.50.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputgooglecloudlogging.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -15,14 +12,14 @@ class OutputGoogleCloudLoggingType(str, Enum):
     GOOGLE_CLOUD_LOGGING = "google_cloud_logging"


-class LogLocationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class LogLocationType(str, Enum):
     PROJECT = "project"
     ORGANIZATION = "organization"
     BILLING_ACCOUNT = "billingAccount"
     FOLDER = "folder"


-class PayloadFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class PayloadFormat(str, Enum):
     r"""Format to use when sending payload. Defaults to Text."""

     TEXT = "text"
@@ -59,9 +56,7 @@ class ResourceTypeLabel(BaseModel):
     r"""JavaScript expression to compute the label's value."""


-class OutputGoogleCloudLoggingGoogleAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingGoogleAuthenticationMethod(str, Enum):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -69,9 +64,7 @@ class OutputGoogleCloudLoggingGoogleAuthenticationMethod(
     SECRET = "secret"


-class OutputGoogleCloudLoggingBackpressureBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -79,23 +72,21 @@ class OutputGoogleCloudLoggingBackpressureBehavior(
     QUEUE = "queue"


-class OutputGoogleCloudLoggingCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudLoggingCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudLoggingQueueFullBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudLoggingMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -242,8 +233,7 @@ class OutputGoogleCloudLogging(BaseModel):
     type: OutputGoogleCloudLoggingType

     log_location_type: Annotated[
-        Annotated[LogLocationType, PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="logLocationType"),
+        LogLocationType, pydantic.Field(alias="logLocationType")
     ]

     log_name_expression: Annotated[str, pydantic.Field(alias="logNameExpression")]
@@ -272,8 +262,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""Tags for filtering and grouping in @{product}"""

     payload_format: Annotated[
-        Annotated[Optional[PayloadFormat], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="payloadFormat"),
+        Optional[PayloadFormat], pydantic.Field(alias="payloadFormat")
     ] = PayloadFormat.TEXT
     r"""Format to use when sending payload. Defaults to Text."""

@@ -303,10 +292,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""JavaScript expression to compute the value of the insert ID field."""

     google_auth_method: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGoogleCloudLoggingGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -489,10 +475,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""A JavaScript expression that evaluates to the the sampling decision of the span associated with the log entry. See the [documentation](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) for details."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudLoggingBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -523,29 +506,19 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingCompression],
         pydantic.Field(alias="pqCompress"),
     ] = OutputGoogleCloudLoggingCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGoogleCloudLoggingQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingMode],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputGoogleCloudLoggingMode], pydantic.Field(alias="pqMode")
     ] = OutputGoogleCloudLoggingMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

cribl_control_plane/models/outputgooglecloudstorage.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -15,22 +12,20 @@ class OutputGoogleCloudStorageType(str, Enum):
     GOOGLE_CLOUD_STORAGE = "google_cloud_storage"


-class OutputGoogleCloudStorageSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageSignatureVersion(str, Enum):
     r"""Signature version to use for signing Google Cloud Storage requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputGoogleCloudStorageAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageAuthenticationMethod(str, Enum):
     AUTO = "auto"
     MANUAL = "manual"
     SECRET = "secret"


-class OutputGoogleCloudStorageObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageObjectACL(str, Enum):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -41,7 +36,7 @@ class OutputGoogleCloudStorageObjectACL(str, Enum, metaclass=utils.OpenEnumMeta)
     PUBLIC_READ = "public-read"


-class OutputGoogleCloudStorageStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageStorageClass(str, Enum):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -50,7 +45,7 @@ class OutputGoogleCloudStorageStorageClass(str, Enum, metaclass=utils.OpenEnumMe
     ARCHIVE = "ARCHIVE"


-class OutputGoogleCloudStorageDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageDataFormat(str, Enum):
     r"""Format of the output data"""

     JSON = "json"
@@ -58,32 +53,28 @@ class OutputGoogleCloudStorageDataFormat(str, Enum, metaclass=utils.OpenEnumMeta
     PARQUET = "parquet"


-class OutputGoogleCloudStorageBackpressureBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageDiskSpaceProtection(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageCompression(str, Enum):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudStorageCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageCompressionLevel(str, Enum):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -91,7 +82,7 @@ class OutputGoogleCloudStorageCompressionLevel(str, Enum, metaclass=utils.OpenEn
     BEST_COMPRESSION = "best_compression"


-class OutputGoogleCloudStorageParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageParquetVersion(str, Enum):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -99,7 +90,7 @@ class OutputGoogleCloudStorageParquetVersion(str, Enum, metaclass=utils.OpenEnum
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputGoogleCloudStorageDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageDataPageVersion(str, Enum):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -253,19 +244,13 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Google Cloud Storage service endpoint"""

     signature_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputGoogleCloudStorageSignatureVersion.V4
     r"""Signature version to use for signing Google Cloud Storage requests"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputGoogleCloudStorageAuthenticationMethod.MANUAL

@@ -283,19 +268,12 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Disable if you can access files within the bucket but not the bucket itself"""

     object_acl: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageObjectACL],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="objectACL"),
+        Optional[OutputGoogleCloudStorageObjectACL], pydantic.Field(alias="objectACL")
     ] = OutputGoogleCloudStorageObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageStorageClass],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageStorageClass],
         pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""
@@ -326,11 +304,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputGoogleCloudStorageDataFormat], pydantic.Field(alias="format")
     ] = OutputGoogleCloudStorageDataFormat.JSON
     r"""Format of the output data"""

@@ -373,10 +347,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudStorageBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -387,27 +358,20 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputGoogleCloudStorageDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     description: Optional[str] = None

-    compress: Annotated[
-        Optional[OutputGoogleCloudStorageCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = OutputGoogleCloudStorageCompression.GZIP
+    compress: Optional[OutputGoogleCloudStorageCompression] = (
+        OutputGoogleCloudStorageCompression.GZIP
+    )
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageCompressionLevel],
         pydantic.Field(alias="compressionLevel"),
     ] = OutputGoogleCloudStorageCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""
@@ -418,19 +382,13 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageParquetVersion],
         pydantic.Field(alias="parquetVersion"),
     ] = OutputGoogleCloudStorageParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageDataPageVersion],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputGoogleCloudStorageDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
cribl_control_plane/models/outputgooglepubsub.py

@@ -1,12 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict

@@ -15,9 +12,7 @@ class OutputGooglePubsubType(str, Enum):
     GOOGLE_PUBSUB = "google_pubsub"


-class OutputGooglePubsubGoogleAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -25,7 +20,7 @@ class OutputGooglePubsubGoogleAuthenticationMethod(
     SECRET = "secret"


-class OutputGooglePubsubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -33,21 +28,21 @@ class OutputGooglePubsubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnum
     QUEUE = "queue"


-class OutputGooglePubsubCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGooglePubsubQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGooglePubsubMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -154,10 +149,7 @@ class OutputGooglePubsub(BaseModel):
     r"""Region to publish messages to. Select 'default' to allow Google to auto-select the nearest region. When using ordered delivery, the selected region must be allowed by message storage policy."""

     google_auth_method: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubGoogleAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubGoogleAuthenticationMethod],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGooglePubsubGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -197,10 +189,7 @@ class OutputGooglePubsub(BaseModel):
     r"""The maximum number of in-progress API requests before backpressure is applied."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGooglePubsubBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -221,28 +210,18 @@ class OutputGooglePubsub(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqCompress"),
+        Optional[OutputGooglePubsubCompression], pydantic.Field(alias="pqCompress")
     ] = OutputGooglePubsubCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGooglePubsubQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputGooglePubsubMode], pydantic.Field(alias="pqMode")
     ] = OutputGooglePubsubMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

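For values that do match the declared members, construction and alias-based serialization look the same as before: strings are still coerced to the str-based enum members, and dumps keep the same wire format. A final illustrative sketch, again with stand-in names and assuming pydantic v2:

# Illustrative only: member values still validate, and alias-based output is unchanged.
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class DemoMode(str, Enum):
    ERROR = "error"
    ALWAYS = "always"


class DemoPQ(pydantic.BaseModel):
    pq_mode: Annotated[Optional[DemoMode], pydantic.Field(alias="pqMode")] = (
        DemoMode.ERROR
    )


m = DemoPQ.model_validate({"pqMode": "always"})
print(m.pq_mode)                                 # DemoMode.ALWAYS
print(m.model_dump(by_alias=True, mode="json"))  # {'pqMode': 'always'}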