cribl-control-plane 0.0.16__py3-none-any.whl → 0.0.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (156)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +4365 -4124
  4. cribl_control_plane/models/createinputop.py +1734 -2771
  5. cribl_control_plane/models/createoutputop.py +2153 -4314
  6. cribl_control_plane/models/createversioncommitop.py +24 -0
  7. cribl_control_plane/models/createversionpushop.py +23 -0
  8. cribl_control_plane/models/createversionrevertop.py +47 -0
  9. cribl_control_plane/models/createversionsyncop.py +23 -0
  10. cribl_control_plane/models/createversionundoop.py +37 -0
  11. cribl_control_plane/models/getversionbranchop.py +23 -0
  12. cribl_control_plane/models/getversioncountop.py +47 -0
  13. cribl_control_plane/models/getversioncurrentbranchop.py +23 -0
  14. cribl_control_plane/models/getversiondiffop.py +63 -0
  15. cribl_control_plane/models/getversionfilesop.py +48 -0
  16. cribl_control_plane/models/getversioninfoop.py +24 -0
  17. cribl_control_plane/models/getversionshowop.py +63 -0
  18. cribl_control_plane/models/getversionstatusop.py +38 -0
  19. cribl_control_plane/models/gitcommitparams.py +23 -0
  20. cribl_control_plane/models/gitcommitsummary.py +68 -0
  21. cribl_control_plane/models/gitfile.py +20 -0
  22. cribl_control_plane/models/gitfilesresponse.py +22 -0
  23. cribl_control_plane/models/gitinfo.py +23 -0
  24. cribl_control_plane/models/gitrevertparams.py +20 -0
  25. cribl_control_plane/models/gitrevertresult.py +48 -0
  26. cribl_control_plane/models/gitstatusresult.py +73 -0
  27. cribl_control_plane/models/healthstatus.py +4 -7
  28. cribl_control_plane/models/inputappscope.py +16 -36
  29. cribl_control_plane/models/inputazureblob.py +8 -19
  30. cribl_control_plane/models/inputcollection.py +6 -15
  31. cribl_control_plane/models/inputconfluentcloud.py +20 -45
  32. cribl_control_plane/models/inputcribl.py +6 -13
  33. cribl_control_plane/models/inputcriblhttp.py +10 -27
  34. cribl_control_plane/models/inputcribllakehttp.py +12 -26
  35. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  36. cribl_control_plane/models/inputcribltcp.py +10 -27
  37. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  38. cribl_control_plane/models/inputdatadogagent.py +10 -28
  39. cribl_control_plane/models/inputdatagen.py +6 -13
  40. cribl_control_plane/models/inputedgeprometheus.py +31 -64
  41. cribl_control_plane/models/inputelastic.py +16 -44
  42. cribl_control_plane/models/inputeventhub.py +8 -19
  43. cribl_control_plane/models/inputexec.py +8 -16
  44. cribl_control_plane/models/inputfile.py +8 -17
  45. cribl_control_plane/models/inputfirehose.py +10 -27
  46. cribl_control_plane/models/inputgooglepubsub.py +8 -23
  47. cribl_control_plane/models/inputgrafana_union.py +35 -81
  48. cribl_control_plane/models/inputhttp.py +10 -27
  49. cribl_control_plane/models/inputhttpraw.py +10 -27
  50. cribl_control_plane/models/inputjournalfiles.py +6 -16
  51. cribl_control_plane/models/inputkafka.py +16 -45
  52. cribl_control_plane/models/inputkinesis.py +16 -42
  53. cribl_control_plane/models/inputkubeevents.py +6 -13
  54. cribl_control_plane/models/inputkubelogs.py +10 -18
  55. cribl_control_plane/models/inputkubemetrics.py +10 -18
  56. cribl_control_plane/models/inputloki.py +12 -33
  57. cribl_control_plane/models/inputmetrics.py +10 -25
  58. cribl_control_plane/models/inputmodeldriventelemetry.py +12 -32
  59. cribl_control_plane/models/inputmsk.py +18 -52
  60. cribl_control_plane/models/inputnetflow.py +6 -15
  61. cribl_control_plane/models/inputoffice365mgmt.py +16 -37
  62. cribl_control_plane/models/inputoffice365msgtrace.py +18 -39
  63. cribl_control_plane/models/inputoffice365service.py +18 -39
  64. cribl_control_plane/models/inputopentelemetry.py +18 -42
  65. cribl_control_plane/models/inputprometheus.py +20 -54
  66. cribl_control_plane/models/inputprometheusrw.py +12 -34
  67. cribl_control_plane/models/inputrawudp.py +6 -15
  68. cribl_control_plane/models/inputs3.py +10 -23
  69. cribl_control_plane/models/inputs3inventory.py +12 -28
  70. cribl_control_plane/models/inputsecuritylake.py +12 -29
  71. cribl_control_plane/models/inputsnmp.py +8 -20
  72. cribl_control_plane/models/inputsplunk.py +14 -37
  73. cribl_control_plane/models/inputsplunkhec.py +12 -33
  74. cribl_control_plane/models/inputsplunksearch.py +16 -37
  75. cribl_control_plane/models/inputsqs.py +12 -31
  76. cribl_control_plane/models/inputsyslog_union.py +29 -53
  77. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  78. cribl_control_plane/models/inputsystemstate.py +10 -18
  79. cribl_control_plane/models/inputtcp.py +12 -33
  80. cribl_control_plane/models/inputtcpjson.py +12 -33
  81. cribl_control_plane/models/inputwef.py +20 -45
  82. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  83. cribl_control_plane/models/inputwineventlogs.py +12 -22
  84. cribl_control_plane/models/inputwiz.py +10 -25
  85. cribl_control_plane/models/inputzscalerhec.py +12 -33
  86. cribl_control_plane/models/output.py +3 -6
  87. cribl_control_plane/models/outputazureblob.py +20 -52
  88. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  89. cribl_control_plane/models/outputazureeventhub.py +20 -44
  90. cribl_control_plane/models/outputazurelogs.py +14 -37
  91. cribl_control_plane/models/outputclickhouse.py +22 -59
  92. cribl_control_plane/models/outputcloudwatch.py +12 -33
  93. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  94. cribl_control_plane/models/outputcriblhttp.py +18 -46
  95. cribl_control_plane/models/outputcribllake.py +18 -48
  96. cribl_control_plane/models/outputcribltcp.py +20 -47
  97. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  98. cribl_control_plane/models/outputdatadog.py +22 -50
  99. cribl_control_plane/models/outputdataset.py +20 -48
  100. cribl_control_plane/models/outputdefault.py +2 -5
  101. cribl_control_plane/models/outputdevnull.py +2 -5
  102. cribl_control_plane/models/outputdiskspool.py +4 -9
  103. cribl_control_plane/models/outputdls3.py +26 -72
  104. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  105. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  106. cribl_control_plane/models/outputelastic.py +20 -45
  107. cribl_control_plane/models/outputelasticcloud.py +14 -40
  108. cribl_control_plane/models/outputexabeam.py +12 -33
  109. cribl_control_plane/models/outputfilesystem.py +16 -41
  110. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  111. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  112. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  113. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  114. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  115. cribl_control_plane/models/outputgraphite.py +16 -35
  116. cribl_control_plane/models/outputhoneycomb.py +14 -37
  117. cribl_control_plane/models/outputhumiohec.py +18 -47
  118. cribl_control_plane/models/outputinfluxdb.py +18 -44
  119. cribl_control_plane/models/outputkafka.py +28 -73
  120. cribl_control_plane/models/outputkinesis.py +18 -44
  121. cribl_control_plane/models/outputloki.py +18 -43
  122. cribl_control_plane/models/outputminio.py +26 -69
  123. cribl_control_plane/models/outputmsk.py +30 -81
  124. cribl_control_plane/models/outputnetflow.py +2 -5
  125. cribl_control_plane/models/outputnewrelic.py +20 -45
  126. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  127. cribl_control_plane/models/outputopentelemetry.py +28 -69
  128. cribl_control_plane/models/outputprometheus.py +14 -37
  129. cribl_control_plane/models/outputring.py +10 -21
  130. cribl_control_plane/models/outputrouter.py +2 -5
  131. cribl_control_plane/models/outputs3.py +28 -72
  132. cribl_control_plane/models/outputsecuritylake.py +20 -56
  133. cribl_control_plane/models/outputsentinel.py +20 -49
  134. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  135. cribl_control_plane/models/outputservicenow.py +26 -64
  136. cribl_control_plane/models/outputsignalfx.py +16 -39
  137. cribl_control_plane/models/outputsnmp.py +2 -5
  138. cribl_control_plane/models/outputsns.py +16 -40
  139. cribl_control_plane/models/outputsplunk.py +26 -64
  140. cribl_control_plane/models/outputsplunkhec.py +14 -37
  141. cribl_control_plane/models/outputsplunklb.py +36 -83
  142. cribl_control_plane/models/outputsqs.py +18 -45
  143. cribl_control_plane/models/outputstatsd.py +16 -34
  144. cribl_control_plane/models/outputstatsdext.py +14 -33
  145. cribl_control_plane/models/outputsumologic.py +14 -37
  146. cribl_control_plane/models/outputsyslog.py +26 -60
  147. cribl_control_plane/models/outputtcpjson.py +22 -54
  148. cribl_control_plane/models/outputwavefront.py +14 -37
  149. cribl_control_plane/models/outputwebhook.py +24 -60
  150. cribl_control_plane/models/outputxsiam.py +16 -37
  151. cribl_control_plane/sdk.py +4 -0
  152. cribl_control_plane/versioning.py +2309 -0
  153. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/METADATA +18 -2
  154. cribl_control_plane-0.0.18.dist-info/RECORD +237 -0
  155. cribl_control_plane-0.0.16.dist-info/RECORD +0 -215
  156. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/WHEEL +0 -0
@@ -1,28 +1,25 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputGoogleCloudLoggingType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudLoggingType(str, Enum):
     GOOGLE_CLOUD_LOGGING = "google_cloud_logging"


-class LogLocationType(str, Enum, metaclass=utils.OpenEnumMeta):
+class LogLocationType(str, Enum):
     PROJECT = "project"
     ORGANIZATION = "organization"
     BILLING_ACCOUNT = "billingAccount"
     FOLDER = "folder"


-class PayloadFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class PayloadFormat(str, Enum):
     r"""Format to use when sending payload. Defaults to Text."""

     TEXT = "text"
@@ -59,9 +56,7 @@ class ResourceTypeLabel(BaseModel):
     r"""JavaScript expression to compute the label's value."""


-class OutputGoogleCloudLoggingGoogleAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingGoogleAuthenticationMethod(str, Enum):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -69,9 +64,7 @@ class OutputGoogleCloudLoggingGoogleAuthenticationMethod(
     SECRET = "secret"


-class OutputGoogleCloudLoggingBackpressureBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -79,23 +72,21 @@ class OutputGoogleCloudLoggingBackpressureBehavior(
     QUEUE = "queue"


-class OutputGoogleCloudLoggingCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudLoggingCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudLoggingQueueFullBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudLoggingQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudLoggingMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -240,8 +231,7 @@ class OutputGoogleCloudLoggingTypedDict(TypedDict):

 class OutputGoogleCloudLogging(BaseModel):
     log_location_type: Annotated[
-        Annotated[LogLocationType, PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="logLocationType"),
+        LogLocationType, pydantic.Field(alias="logLocationType")
     ]

     log_name_expression: Annotated[str, pydantic.Field(alias="logNameExpression")]
@@ -255,10 +245,7 @@ class OutputGoogleCloudLogging(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""

-    type: Annotated[
-        Optional[OutputGoogleCloudLoggingType],
-        PlainValidator(validate_open_enum(False)),
-    ] = None
+    type: Optional[OutputGoogleCloudLoggingType] = None

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -275,8 +262,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""Tags for filtering and grouping in @{product}"""

     payload_format: Annotated[
-        Annotated[Optional[PayloadFormat], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="payloadFormat"),
+        Optional[PayloadFormat], pydantic.Field(alias="payloadFormat")
     ] = PayloadFormat.TEXT
     r"""Format to use when sending payload. Defaults to Text."""

@@ -306,10 +292,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""JavaScript expression to compute the value of the insert ID field."""

     google_auth_method: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingGoogleAuthenticationMethod],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGoogleCloudLoggingGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -492,10 +475,7 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""A JavaScript expression that evaluates to the the sampling decision of the span associated with the log entry. See the [documentation](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) for details."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudLoggingBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -526,29 +506,19 @@ class OutputGoogleCloudLogging(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingCompression],
         pydantic.Field(alias="pqCompress"),
     ] = OutputGoogleCloudLoggingCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudLoggingQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGoogleCloudLoggingQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudLoggingMode],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputGoogleCloudLoggingMode], pydantic.Field(alias="pqMode")
     ] = OutputGoogleCloudLoggingMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

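The pattern repeated throughout this release is visible in the hunk above (the class names suggest cribl_control_plane/models/outputgooglecloudlogging.py): every enum drops its metaclass=utils.OpenEnumMeta declaration, and the corresponding model fields drop their PlainValidator(validate_open_enum(False)) wrappers, so the generated enums change from open enums that tolerate unrecognized values into ordinary closed str Enums that pydantic validates strictly. The sketch below illustrates that behavioral difference in isolation; the lenient() validator is a hypothetical stand-in for the removed cribl_control_plane.utils helper, not its actual implementation.

from enum import Enum
from typing import Optional

import pydantic
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class PayloadFormat(str, Enum):
    # Closed enum, as generated in 0.0.18
    TEXT = "text"
    JSON = "json"


def lenient(value: object) -> object:
    # Hypothetical stand-in for the removed validate_open_enum(False) helper:
    # keep known members, pass unknown raw values through instead of failing.
    try:
        return PayloadFormat(value)
    except ValueError:
        return value


class ClosedModel(pydantic.BaseModel):
    # 0.0.18 style: strict enum validation
    payload_format: Optional[PayloadFormat] = PayloadFormat.TEXT


class OpenModel(pydantic.BaseModel):
    # 0.0.16 style (approximated): the PlainValidator replaces strict validation
    payload_format: Annotated[Optional[PayloadFormat], PlainValidator(lenient)] = (
        PayloadFormat.TEXT
    )


print(OpenModel(payload_format="something_new").payload_format)  # unknown value survives
try:
    ClosedModel(payload_format="something_new")
except pydantic.ValidationError as err:
    print("closed enum rejects unknown value:", err.error_count(), "error(s)")

In practice this suggests that, after the upgrade, payloads carrying enum values the SDK does not know about should fail validation rather than pass through as raw strings.
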
@@ -1,36 +1,31 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputGoogleCloudStorageType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageType(str, Enum):
     GOOGLE_CLOUD_STORAGE = "google_cloud_storage"


-class OutputGoogleCloudStorageSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageSignatureVersion(str, Enum):
     r"""Signature version to use for signing Google Cloud Storage requests"""

     V2 = "v2"
     V4 = "v4"


-class OutputGoogleCloudStorageAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageAuthenticationMethod(str, Enum):
     AUTO = "auto"
     MANUAL = "manual"
     SECRET = "secret"


-class OutputGoogleCloudStorageObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageObjectACL(str, Enum):
     r"""Object ACL to assign to uploaded objects"""

     PRIVATE = "private"
@@ -41,7 +36,7 @@ class OutputGoogleCloudStorageObjectACL(str, Enum, metaclass=utils.OpenEnumMeta)
     PUBLIC_READ = "public-read"


-class OutputGoogleCloudStorageStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageStorageClass(str, Enum):
     r"""Storage class to select for uploaded objects"""

     STANDARD = "STANDARD"
@@ -50,7 +45,7 @@ class OutputGoogleCloudStorageStorageClass(str, Enum, metaclass=utils.OpenEnumMe
     ARCHIVE = "ARCHIVE"


-class OutputGoogleCloudStorageDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageDataFormat(str, Enum):
     r"""Format of the output data"""

     JSON = "json"
@@ -58,32 +53,28 @@ class OutputGoogleCloudStorageDataFormat(str, Enum, metaclass=utils.OpenEnumMeta
     PARQUET = "parquet"


-class OutputGoogleCloudStorageBackpressureBehavior(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageDiskSpaceProtection(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGoogleCloudStorageDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     BLOCK = "block"
     DROP = "drop"


-class OutputGoogleCloudStorageCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageCompression(str, Enum):
     r"""Data compression format to apply to HTTP content before it is delivered"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGoogleCloudStorageCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageCompressionLevel(str, Enum):
     r"""Compression level to apply before moving files to final destination"""

     BEST_SPEED = "best_speed"
@@ -91,7 +82,7 @@ class OutputGoogleCloudStorageCompressionLevel(str, Enum, metaclass=utils.OpenEn
     BEST_COMPRESSION = "best_compression"


-class OutputGoogleCloudStorageParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageParquetVersion(str, Enum):
     r"""Determines which data types are supported and how they are represented"""

     PARQUET_1_0 = "PARQUET_1_0"
@@ -99,7 +90,7 @@ class OutputGoogleCloudStorageParquetVersion(str, Enum, metaclass=utils.OpenEnum
     PARQUET_2_6 = "PARQUET_2_6"


-class OutputGoogleCloudStorageDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGoogleCloudStorageDataPageVersion(str, Enum):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -233,10 +224,7 @@ class OutputGoogleCloudStorage(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""

-    type: Annotated[
-        Optional[OutputGoogleCloudStorageType],
-        PlainValidator(validate_open_enum(False)),
-    ] = None
+    type: Optional[OutputGoogleCloudStorageType] = None

     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -256,19 +244,13 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Google Cloud Storage service endpoint"""

     signature_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageSignatureVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageSignatureVersion],
         pydantic.Field(alias="signatureVersion"),
     ] = OutputGoogleCloudStorageSignatureVersion.V4
     r"""Signature version to use for signing Google Cloud Storage requests"""

     aws_authentication_method: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageAuthenticationMethod],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = OutputGoogleCloudStorageAuthenticationMethod.MANUAL

@@ -286,19 +268,12 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Disable if you can access files within the bucket but not the bucket itself"""

     object_acl: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageObjectACL],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="objectACL"),
+        Optional[OutputGoogleCloudStorageObjectACL], pydantic.Field(alias="objectACL")
     ] = OutputGoogleCloudStorageObjectACL.PRIVATE
     r"""Object ACL to assign to uploaded objects"""

     storage_class: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageStorageClass],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageStorageClass],
         pydantic.Field(alias="storageClass"),
     ] = None
     r"""Storage class to select for uploaded objects"""
@@ -329,11 +304,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

     format_: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputGoogleCloudStorageDataFormat], pydantic.Field(alias="format")
     ] = OutputGoogleCloudStorageDataFormat.JSON
     r"""Format of the output data"""

@@ -376,10 +347,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Buffer size used to write to a file"""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageBackpressureBehavior],
        pydantic.Field(alias="onBackpressure"),
     ] = OutputGoogleCloudStorageBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -390,27 +358,20 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputGoogleCloudStorageDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

     description: Optional[str] = None

-    compress: Annotated[
-        Optional[OutputGoogleCloudStorageCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = OutputGoogleCloudStorageCompression.GZIP
+    compress: Optional[OutputGoogleCloudStorageCompression] = (
+        OutputGoogleCloudStorageCompression.GZIP
+    )
     r"""Data compression format to apply to HTTP content before it is delivered"""

     compression_level: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageCompressionLevel],
         pydantic.Field(alias="compressionLevel"),
     ] = OutputGoogleCloudStorageCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""
@@ -421,19 +382,13 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""

     parquet_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageParquetVersion],
         pydantic.Field(alias="parquetVersion"),
     ] = OutputGoogleCloudStorageParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""

     parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputGoogleCloudStorageDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGoogleCloudStorageDataPageVersion],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputGoogleCloudStorageDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
@@ -1,23 +1,18 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


-class OutputGooglePubsubType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubTypeGooglePubsub(str, Enum):
     GOOGLE_PUBSUB = "google_pubsub"


-class OutputGooglePubsubGoogleAuthenticationMethod(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""

     AUTO = "auto"
@@ -25,28 +20,26 @@ class OutputGooglePubsubGoogleAuthenticationMethod(
     SECRET = "secret"


-class FlushPeriodSecType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubFlushPeriodSecType(str, Enum):
     NUMBER = "number"


 class FlushPeriodSecTypedDict(TypedDict):
     r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""

-    type: NotRequired[FlushPeriodSecType]
+    type: NotRequired[OutputGooglePubsubFlushPeriodSecType]
     default: NotRequired[float]


 class FlushPeriodSec(BaseModel):
     r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""

-    type: Annotated[
-        Optional[FlushPeriodSecType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[OutputGooglePubsubFlushPeriodSecType] = None

     default: Optional[float] = None


-class OutputGooglePubsubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""

     BLOCK = "block"
@@ -54,21 +47,21 @@ class OutputGooglePubsubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnum
     QUEUE = "queue"


-class OutputGooglePubsubCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""

     NONE = "none"
     GZIP = "gzip"


-class OutputGooglePubsubQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubQueueFullBehavior(str, Enum):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     BLOCK = "block"
     DROP = "drop"


-class OutputGooglePubsubMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputGooglePubsubMode(str, Enum):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

     ERROR = "error"
@@ -85,7 +78,7 @@ class OutputGooglePubsubPqControls(BaseModel):


 class OutputGooglePubsubTypedDict(TypedDict):
-    type: OutputGooglePubsubType
+    type: OutputGooglePubsubTypeGooglePubsub
     topic_name: str
     r"""ID of the topic to send events to."""
     id: NotRequired[str]
@@ -141,7 +134,7 @@ class OutputGooglePubsubTypedDict(TypedDict):


 class OutputGooglePubsub(BaseModel):
-    type: Annotated[OutputGooglePubsubType, PlainValidator(validate_open_enum(False))]
+    type: OutputGooglePubsubTypeGooglePubsub

     topic_name: Annotated[str, pydantic.Field(alias="topicName")]
     r"""ID of the topic to send events to."""
@@ -175,10 +168,7 @@ class OutputGooglePubsub(BaseModel):
     r"""Region to publish messages to. Select 'default' to allow Google to auto-select the nearest region. When using ordered delivery, the selected region must be allowed by message storage policy."""

     google_auth_method: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubGoogleAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubGoogleAuthenticationMethod],
         pydantic.Field(alias="googleAuthMethod"),
     ] = OutputGooglePubsubGoogleAuthenticationMethod.MANUAL
     r"""Choose Auto to use Google Application Default Credentials (ADC), Manual to enter Google service account credentials directly, or Secret to select or create a stored secret that references Google service account credentials."""
@@ -220,10 +210,7 @@ class OutputGooglePubsub(BaseModel):
     r"""The maximum number of in-progress API requests before backpressure is applied."""

     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputGooglePubsubBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -244,28 +231,18 @@ class OutputGooglePubsub(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""

     pq_compress: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubCompression],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="pqCompress"),
+        Optional[OutputGooglePubsubCompression], pydantic.Field(alias="pqCompress")
     ] = OutputGooglePubsubCompression.NONE
     r"""Codec to use to compress the persisted data"""

     pq_on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubQueueFullBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputGooglePubsubQueueFullBehavior],
         pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputGooglePubsubQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

     pq_mode: Annotated[
-        Annotated[
-            Optional[OutputGooglePubsubMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
+        Optional[OutputGooglePubsubMode], pydantic.Field(alias="pqMode")
     ] = OutputGooglePubsubMode.ERROR
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
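
Beyond the open-enum removal, this last hunk (apparently cribl_control_plane/models/outputgooglepubsub.py) renames two public symbols: OutputGooglePubsubType becomes OutputGooglePubsubTypeGooglePubsub, and FlushPeriodSecType becomes OutputGooglePubsubFlushPeriodSecType. Callers importing the old names would need to update them. The fragment below sketches what that migration might look like; it assumes the classes are re-exported from cribl_control_plane.models (as the large models/__init__.py change suggests) and that the generated BaseModel accepts Python field names alongside the camelCase aliases, neither of which is verified here.

# 0.0.16 imports (old names) — would no longer resolve:
# from cribl_control_plane.models import OutputGooglePubsubType, FlushPeriodSecType

# 0.0.18 equivalents, assuming the models package re-exports these classes:
from cribl_control_plane.models import (
    OutputGooglePubsub,
    OutputGooglePubsubTypeGooglePubsub,
)

destination = OutputGooglePubsub(
    type=OutputGooglePubsubTypeGooglePubsub.GOOGLE_PUBSUB,
    topic_name="my-topic",  # serialized as "topicName" via the field alias
)
print(destination.type, destination.topic_name)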