cribl-control-plane 0.0.16__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (133)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/errors/healthstatus_error.py +2 -8
  3. cribl_control_plane/models/__init__.py +4124 -4124
  4. cribl_control_plane/models/createinputop.py +1734 -2771
  5. cribl_control_plane/models/createoutputop.py +2153 -4314
  6. cribl_control_plane/models/healthstatus.py +4 -7
  7. cribl_control_plane/models/inputappscope.py +16 -36
  8. cribl_control_plane/models/inputazureblob.py +8 -19
  9. cribl_control_plane/models/inputcollection.py +6 -15
  10. cribl_control_plane/models/inputconfluentcloud.py +20 -45
  11. cribl_control_plane/models/inputcribl.py +6 -13
  12. cribl_control_plane/models/inputcriblhttp.py +10 -27
  13. cribl_control_plane/models/inputcribllakehttp.py +12 -26
  14. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  15. cribl_control_plane/models/inputcribltcp.py +10 -27
  16. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  17. cribl_control_plane/models/inputdatadogagent.py +10 -28
  18. cribl_control_plane/models/inputdatagen.py +6 -13
  19. cribl_control_plane/models/inputedgeprometheus.py +31 -64
  20. cribl_control_plane/models/inputelastic.py +16 -44
  21. cribl_control_plane/models/inputeventhub.py +8 -19
  22. cribl_control_plane/models/inputexec.py +8 -16
  23. cribl_control_plane/models/inputfile.py +8 -17
  24. cribl_control_plane/models/inputfirehose.py +10 -27
  25. cribl_control_plane/models/inputgooglepubsub.py +8 -23
  26. cribl_control_plane/models/inputgrafana_union.py +35 -81
  27. cribl_control_plane/models/inputhttp.py +10 -27
  28. cribl_control_plane/models/inputhttpraw.py +10 -27
  29. cribl_control_plane/models/inputjournalfiles.py +6 -16
  30. cribl_control_plane/models/inputkafka.py +16 -45
  31. cribl_control_plane/models/inputkinesis.py +16 -42
  32. cribl_control_plane/models/inputkubeevents.py +6 -13
  33. cribl_control_plane/models/inputkubelogs.py +10 -18
  34. cribl_control_plane/models/inputkubemetrics.py +10 -18
  35. cribl_control_plane/models/inputloki.py +12 -33
  36. cribl_control_plane/models/inputmetrics.py +10 -25
  37. cribl_control_plane/models/inputmodeldriventelemetry.py +12 -32
  38. cribl_control_plane/models/inputmsk.py +18 -52
  39. cribl_control_plane/models/inputnetflow.py +6 -15
  40. cribl_control_plane/models/inputoffice365mgmt.py +16 -37
  41. cribl_control_plane/models/inputoffice365msgtrace.py +18 -39
  42. cribl_control_plane/models/inputoffice365service.py +18 -39
  43. cribl_control_plane/models/inputopentelemetry.py +18 -42
  44. cribl_control_plane/models/inputprometheus.py +20 -54
  45. cribl_control_plane/models/inputprometheusrw.py +12 -34
  46. cribl_control_plane/models/inputrawudp.py +6 -15
  47. cribl_control_plane/models/inputs3.py +10 -23
  48. cribl_control_plane/models/inputs3inventory.py +12 -28
  49. cribl_control_plane/models/inputsecuritylake.py +12 -29
  50. cribl_control_plane/models/inputsnmp.py +8 -20
  51. cribl_control_plane/models/inputsplunk.py +14 -37
  52. cribl_control_plane/models/inputsplunkhec.py +12 -33
  53. cribl_control_plane/models/inputsplunksearch.py +16 -37
  54. cribl_control_plane/models/inputsqs.py +12 -31
  55. cribl_control_plane/models/inputsyslog_union.py +29 -53
  56. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  57. cribl_control_plane/models/inputsystemstate.py +10 -18
  58. cribl_control_plane/models/inputtcp.py +12 -33
  59. cribl_control_plane/models/inputtcpjson.py +12 -33
  60. cribl_control_plane/models/inputwef.py +20 -45
  61. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  62. cribl_control_plane/models/inputwineventlogs.py +12 -22
  63. cribl_control_plane/models/inputwiz.py +10 -25
  64. cribl_control_plane/models/inputzscalerhec.py +12 -33
  65. cribl_control_plane/models/output.py +3 -6
  66. cribl_control_plane/models/outputazureblob.py +20 -52
  67. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  68. cribl_control_plane/models/outputazureeventhub.py +20 -44
  69. cribl_control_plane/models/outputazurelogs.py +14 -37
  70. cribl_control_plane/models/outputclickhouse.py +22 -59
  71. cribl_control_plane/models/outputcloudwatch.py +12 -33
  72. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  73. cribl_control_plane/models/outputcriblhttp.py +18 -46
  74. cribl_control_plane/models/outputcribllake.py +18 -48
  75. cribl_control_plane/models/outputcribltcp.py +20 -47
  76. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  77. cribl_control_plane/models/outputdatadog.py +22 -50
  78. cribl_control_plane/models/outputdataset.py +20 -48
  79. cribl_control_plane/models/outputdefault.py +2 -5
  80. cribl_control_plane/models/outputdevnull.py +2 -5
  81. cribl_control_plane/models/outputdiskspool.py +4 -9
  82. cribl_control_plane/models/outputdls3.py +26 -72
  83. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  84. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  85. cribl_control_plane/models/outputelastic.py +20 -45
  86. cribl_control_plane/models/outputelasticcloud.py +14 -40
  87. cribl_control_plane/models/outputexabeam.py +12 -33
  88. cribl_control_plane/models/outputfilesystem.py +16 -41
  89. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  90. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  91. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  92. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  93. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  94. cribl_control_plane/models/outputgraphite.py +16 -35
  95. cribl_control_plane/models/outputhoneycomb.py +14 -37
  96. cribl_control_plane/models/outputhumiohec.py +18 -47
  97. cribl_control_plane/models/outputinfluxdb.py +18 -44
  98. cribl_control_plane/models/outputkafka.py +28 -73
  99. cribl_control_plane/models/outputkinesis.py +18 -44
  100. cribl_control_plane/models/outputloki.py +18 -43
  101. cribl_control_plane/models/outputminio.py +26 -69
  102. cribl_control_plane/models/outputmsk.py +30 -81
  103. cribl_control_plane/models/outputnetflow.py +2 -5
  104. cribl_control_plane/models/outputnewrelic.py +20 -45
  105. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  106. cribl_control_plane/models/outputopentelemetry.py +28 -69
  107. cribl_control_plane/models/outputprometheus.py +14 -37
  108. cribl_control_plane/models/outputring.py +10 -21
  109. cribl_control_plane/models/outputrouter.py +2 -5
  110. cribl_control_plane/models/outputs3.py +28 -72
  111. cribl_control_plane/models/outputsecuritylake.py +20 -56
  112. cribl_control_plane/models/outputsentinel.py +20 -49
  113. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  114. cribl_control_plane/models/outputservicenow.py +26 -64
  115. cribl_control_plane/models/outputsignalfx.py +16 -39
  116. cribl_control_plane/models/outputsnmp.py +2 -5
  117. cribl_control_plane/models/outputsns.py +16 -40
  118. cribl_control_plane/models/outputsplunk.py +26 -64
  119. cribl_control_plane/models/outputsplunkhec.py +14 -37
  120. cribl_control_plane/models/outputsplunklb.py +36 -83
  121. cribl_control_plane/models/outputsqs.py +18 -45
  122. cribl_control_plane/models/outputstatsd.py +16 -34
  123. cribl_control_plane/models/outputstatsdext.py +14 -33
  124. cribl_control_plane/models/outputsumologic.py +14 -37
  125. cribl_control_plane/models/outputsyslog.py +26 -60
  126. cribl_control_plane/models/outputtcpjson.py +22 -54
  127. cribl_control_plane/models/outputwavefront.py +14 -37
  128. cribl_control_plane/models/outputwebhook.py +24 -60
  129. cribl_control_plane/models/outputxsiam.py +16 -37
  130. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +1 -1
  131. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  132. cribl_control_plane-0.0.16.dist-info/RECORD +0 -215
  133. {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
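
The hunks below (from inputwineventlogs.py, inputwiz.py, inputzscalerhec.py, models/__init__.py, and outputazureblob.py) show the two changes this release repeats across nearly every model module: enum classes drop the `metaclass=utils.OpenEnumMeta` argument, and enum-typed fields drop their `PlainValidator(validate_open_enum(False))` wrappers. Judging by the helper names, the generated enums move from the SDK's "open" enums, which appear to tolerate values outside the declared members, to plain closed `str, Enum` fields, so an unrecognized value should now raise a pydantic `ValidationError`. A minimal sketch of the closed-enum behavior, using plain pydantic v2 rather than the SDK's generated classes:

```python
# Toy model mirroring the pattern in the 0.0.17 hunks below (not the SDK's
# own classes): a closed str Enum with a default, validated by pydantic v2.
from enum import Enum
from typing import Optional

import pydantic


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


class Pq(pydantic.BaseModel):
    compress: Optional[Compression] = Compression.NONE


print(Pq.model_validate({"compress": "gzip"}).compress)  # Compression.GZIP

try:
    # "zstd" is a hypothetical value outside the declared members
    Pq.model_validate({"compress": "zstd"})
except pydantic.ValidationError as err:
    print(err)  # a closed enum rejects values outside its declared members
```
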
cribl_control_plane/models/inputwineventlogs.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputWinEventLogsType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWinEventLogsType(str, Enum):
     WIN_EVENT_LOGS = "win_event_logs"
 
 
@@ -26,14 +23,14 @@ class InputWinEventLogsConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputWinEventLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWinEventLogsMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputWinEventLogsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWinEventLogsCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputWinEventLogsPqTypedDict(TypedDict):
 
 
 class InputWinEventLogsPq(BaseModel):
-    mode: Annotated[
-        Optional[InputWinEventLogsMode], PlainValidator(validate_open_enum(False))
-    ] = InputWinEventLogsMode.ALWAYS
+    mode: Optional[InputWinEventLogsMode] = InputWinEventLogsMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,21 +79,18 @@ class InputWinEventLogsPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputWinEventLogsCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputWinEventLogsCompression.NONE
+    compress: Optional[InputWinEventLogsCompression] = InputWinEventLogsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class ReadMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class ReadMode(str, Enum):
     r"""Read all stored and future event logs, or only future events"""
 
     OLDEST = "oldest"
     NEWEST = "newest"
 
 
-class EventFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class EventFormat(str, Enum):
     r"""Format of individual events"""
 
     JSON = "json"
@@ -156,7 +148,7 @@ class InputWinEventLogsTypedDict(TypedDict):
 
 
 class InputWinEventLogs(BaseModel):
-    type: Annotated[InputWinEventLogsType, PlainValidator(validate_open_enum(False))]
+    type: InputWinEventLogsType
 
     log_names: Annotated[List[str], pydantic.Field(alias="logNames")]
     r"""Enter the event logs to collect. Run \"Get-WinEvent -ListLog *\" in PowerShell to see the available logs."""
@@ -188,15 +180,13 @@ class InputWinEventLogs(BaseModel):
 
     pq: Optional[InputWinEventLogsPq] = None
 
-    read_mode: Annotated[
-        Annotated[Optional[ReadMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="readMode"),
-    ] = ReadMode.OLDEST
+    read_mode: Annotated[Optional[ReadMode], pydantic.Field(alias="readMode")] = (
+        ReadMode.OLDEST
+    )
     r"""Read all stored and future event logs, or only future events"""
 
     event_format: Annotated[
-        Annotated[Optional[EventFormat], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="eventFormat"),
+        Optional[EventFormat], pydantic.Field(alias="eventFormat")
     ] = EventFormat.JSON
     r"""Format of individual events"""
 
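The reshaped read_mode and event_format fields above keep their camelCase wire names via pydantic.Field(alias=...), just without the extra open-enum validator layer. A small self-contained sketch of how that alias pattern behaves under pydantic v2 (a toy model, not the SDK's InputWinEventLogs, whose remaining required fields aren't shown in these hunks):

```python
# Toy reproduction of the read_mode field pattern shown above; assumes
# pydantic v2 semantics (the alias is accepted on input by default).
from enum import Enum
from typing import Optional

import pydantic
from typing_extensions import Annotated


class ReadMode(str, Enum):
    OLDEST = "oldest"
    NEWEST = "newest"


class WinEventLogsSketch(pydantic.BaseModel):
    read_mode: Annotated[Optional[ReadMode], pydantic.Field(alias="readMode")] = (
        ReadMode.OLDEST
    )


m = WinEventLogsSketch.model_validate({"readMode": "newest"})
print(m.read_mode)                  # ReadMode.NEWEST
print(m.model_dump(by_alias=True))  # {'readMode': <ReadMode.NEWEST: 'newest'>}
```
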
cribl_control_plane/models/inputwiz.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputWizType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizType(str, Enum):
     WIZ = "wiz"
 
 
@@ -26,14 +23,14 @@ class InputWizConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputWizMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputWizCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputWizPqTypedDict(TypedDict):
 
 
 class InputWizPq(BaseModel):
-    mode: Annotated[
-        Optional[InputWizMode], PlainValidator(validate_open_enum(False))
-    ] = InputWizMode.ALWAYS
+    mode: Optional[InputWizMode] = InputWizMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputWizPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputWizCompression], PlainValidator(validate_open_enum(False))
-    ] = InputWizCompression.NONE
+    compress: Optional[InputWizCompression] = InputWizCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -121,7 +114,7 @@ class InputWizMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputWizRetryType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizRetryType(str, Enum):
     r"""The algorithm to use when performing HTTP retries"""
 
     NONE = "none"
@@ -149,9 +142,7 @@ class InputWizRetryRulesTypedDict(TypedDict):
 
 
 class InputWizRetryRules(BaseModel):
-    type: Annotated[
-        Optional[InputWizRetryType], PlainValidator(validate_open_enum(False))
-    ] = InputWizRetryType.BACKOFF
+    type: Optional[InputWizRetryType] = InputWizRetryType.BACKOFF
     r"""The algorithm to use when performing HTTP retries"""
 
     interval: Optional[float] = 1000
@@ -182,7 +173,7 @@ class InputWizRetryRules(BaseModel):
     r"""Retry request when a connection reset (ECONNRESET) error occurs"""
 
 
-class InputWizAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputWizAuthenticationMethod(str, Enum):
     r"""Enter client secret directly, or select a stored secret"""
 
     MANUAL = "manual"
@@ -252,9 +243,7 @@ class InputWiz(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputWizType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputWizType] = None
 
     disabled: Optional[bool] = False
 
@@ -319,11 +308,7 @@ class InputWiz(BaseModel):
     ] = None
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputWizAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputWizAuthenticationMethod], pydantic.Field(alias="authType")
    ] = InputWizAuthenticationMethod.MANUAL
     r"""Enter client secret directly, or select a stored secret"""
 
cribl_control_plane/models/inputzscalerhec.py

@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputZscalerHecType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecType(str, Enum):
     ZSCALER_HEC = "zscaler_hec"
 
 
@@ -26,14 +23,14 @@ class InputZscalerHecConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputZscalerHecMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputZscalerHecCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputZscalerHecPqTypedDict(TypedDict):
 
 
 class InputZscalerHecPq(BaseModel):
-    mode: Annotated[
-        Optional[InputZscalerHecMode], PlainValidator(validate_open_enum(False))
-    ] = InputZscalerHecMode.ALWAYS
+    mode: Optional[InputZscalerHecMode] = InputZscalerHecMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,13 +79,11 @@ class InputZscalerHecPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputZscalerHecCompression], PlainValidator(validate_open_enum(False))
-    ] = InputZscalerHecCompression.NONE
+    compress: Optional[InputZscalerHecCompression] = InputZscalerHecCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
-class InputZscalerHecAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
     MANUAL = "manual"
@@ -127,11 +120,7 @@ class InputZscalerHecAuthToken(BaseModel):
     token: Any
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputZscalerHecAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputZscalerHecAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputZscalerHecAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
@@ -150,14 +139,14 @@ class InputZscalerHecAuthToken(BaseModel):
     r"""Fields to add to events referencing this token"""
 
 
-class InputZscalerHecMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
    TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputZscalerHecMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputZscalerHecMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -216,19 +205,11 @@ class InputZscalerHecTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputZscalerHecMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputZscalerHecMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputZscalerHecMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputZscalerHecMaximumTLSVersion], pydantic.Field(alias="maxVersion")
     ] = None
 
 
@@ -315,9 +296,7 @@ class InputZscalerHec(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputZscalerHecType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputZscalerHecType] = None
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/__init__.py

@@ -40,10 +40,7 @@ from .outputgooglecloudstorage import (
     OutputGoogleCloudStorageTypedDict,
 )
 from .outputgooglepubsub import OutputGooglePubsub, OutputGooglePubsubTypedDict
-from .outputgrafanacloud_union import (
-    OutputGrafanaCloudUnion,
-    OutputGrafanaCloudUnionTypedDict,
-)
+from .outputgrafanacloud import OutputGrafanaCloud, OutputGrafanaCloudTypedDict
 from .outputgraphite import OutputGraphite, OutputGraphiteTypedDict
 from .outputhoneycomb import OutputHoneycomb, OutputHoneycombTypedDict
 from .outputhumiohec import OutputHumioHec, OutputHumioHecTypedDict
@@ -153,7 +150,7 @@ OutputTypedDict = TypeAliasType(
         OutputAzureDataExplorerTypedDict,
         OutputWebhookTypedDict,
         OutputGoogleCloudLoggingTypedDict,
-        OutputGrafanaCloudUnionTypedDict,
+        OutputGrafanaCloudTypedDict,
     ],
 )
 
@@ -224,6 +221,6 @@ Output = TypeAliasType(
         OutputAzureDataExplorer,
         OutputWebhook,
         OutputGoogleCloudLogging,
-        OutputGrafanaCloudUnion,
+        OutputGrafanaCloud,
     ],
 )
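
Beyond the open-enum removal, the __init__.py hunks above reflect the rename of outputgrafanacloud_union.py to outputgrafanacloud.py (file 93 in the list above). Code that imports the old names from cribl_control_plane.models will need the new ones; a hedged before/after, assuming the old *Union names are not re-exported anywhere else:

```python
# 0.0.16 (old import; appears to be removed in 0.0.17)
# from cribl_control_plane.models import OutputGrafanaCloudUnion, OutputGrafanaCloudUnionTypedDict

# 0.0.17 (new import, per the re-exports shown above)
from cribl_control_plane.models import OutputGrafanaCloud, OutputGrafanaCloudTypedDict
```
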
cribl_control_plane/models/outputazureblob.py

@@ -1,21 +1,18 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class OutputAzureBlobType(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobType(str, Enum):
     AZURE_BLOB = "azure_blob"
 
 
-class OutputAzureBlobDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDataFormat(str, Enum):
     r"""Format of the output data"""
 
     JSON = "json"
@@ -23,28 +20,28 @@ class OutputAzureBlobDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET = "parquet"
 
 
-class OutputAzureBlobBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""
 
     BLOCK = "block"
     DROP = "drop"
 
 
-class OutputAzureBlobDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDiskSpaceProtection(str, Enum):
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
 
     BLOCK = "block"
     DROP = "drop"
 
 
-class OutputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobAuthenticationMethod(str, Enum):
     MANUAL = "manual"
     SECRET = "secret"
     CLIENT_SECRET = "clientSecret"
     CLIENT_CERT = "clientCert"
 
 
-class BlobAccessTier(str, Enum, metaclass=utils.OpenEnumMeta):
+class BlobAccessTier(str, Enum):
     INFERRED = "Inferred"
     HOT = "Hot"
     COOL = "Cool"
@@ -52,14 +49,14 @@ class BlobAccessTier(str, Enum, metaclass=utils.OpenEnumMeta):
     ARCHIVE = "Archive"
 
 
-class OutputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobCompression(str, Enum):
     r"""Data compression format to apply to HTTP content before it is delivered"""
 
     NONE = "none"
     GZIP = "gzip"
 
 
-class OutputAzureBlobCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobCompressionLevel(str, Enum):
     r"""Compression level to apply before moving files to final destination"""
 
     BEST_SPEED = "best_speed"
@@ -67,7 +64,7 @@ class OutputAzureBlobCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
     BEST_COMPRESSION = "best_compression"
 
 
-class OutputAzureBlobParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobParquetVersion(str, Enum):
     r"""Determines which data types are supported and how they are represented"""
 
     PARQUET_1_0 = "PARQUET_1_0"
@@ -75,7 +72,7 @@ class OutputAzureBlobParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     PARQUET_2_6 = "PARQUET_2_6"
 
 
-class OutputAzureBlobDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class OutputAzureBlobDataPageVersion(str, Enum):
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
 
     DATA_PAGE_V1 = "DATA_PAGE_V1"
@@ -214,9 +211,7 @@ class OutputAzureBlob(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Annotated[
-        Optional[OutputAzureBlobType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[OutputAzureBlobType] = None
 
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
@@ -266,11 +261,7 @@ class OutputAzureBlob(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
 
     format_: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
+        Optional[OutputAzureBlobDataFormat], pydantic.Field(alias="format")
     ] = OutputAzureBlobDataFormat.JSON
     r"""Format of the output data"""
 
@@ -313,10 +304,7 @@ class OutputAzureBlob(BaseModel):
     r"""Buffer size used to write to a file"""
 
     on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobBackpressureBehavior],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputAzureBlobBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -327,39 +315,26 @@ class OutputAzureBlob(BaseModel):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
 
     on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobDiskSpaceProtection],
         pydantic.Field(alias="onDiskFullBackpressure"),
     ] = OutputAzureBlobDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[OutputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
     ] = OutputAzureBlobAuthenticationMethod.MANUAL
 
     storage_class: Annotated[
-        Annotated[Optional[BlobAccessTier], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="storageClass"),
+        Optional[BlobAccessTier], pydantic.Field(alias="storageClass")
     ] = BlobAccessTier.INFERRED
 
     description: Optional[str] = None
 
-    compress: Annotated[
-        Optional[OutputAzureBlobCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputAzureBlobCompression.GZIP
+    compress: Optional[OutputAzureBlobCompression] = OutputAzureBlobCompression.GZIP
     r"""Data compression format to apply to HTTP content before it is delivered"""
 
     compression_level: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobCompressionLevel],
         pydantic.Field(alias="compressionLevel"),
     ] = OutputAzureBlobCompressionLevel.BEST_SPEED
     r"""Compression level to apply before moving files to final destination"""
@@ -370,19 +345,12 @@ class OutputAzureBlob(BaseModel):
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""
 
     parquet_version: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="parquetVersion"),
+        Optional[OutputAzureBlobParquetVersion], pydantic.Field(alias="parquetVersion")
     ] = OutputAzureBlobParquetVersion.PARQUET_2_6
     r"""Determines which data types are supported and how they are represented"""
 
     parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputAzureBlobDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[OutputAzureBlobDataPageVersion],
         pydantic.Field(alias="parquetDataPageVersion"),
     ] = OutputAzureBlobDataPageVersion.DATA_PAGE_V2
     r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""