cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic. Click here for more details.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
@@ -1,12 +1,11 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import models, utils
4
+ from cribl_control_plane import utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
- from pydantic import field_serializer
10
9
  from pydantic.functional_validators import PlainValidator
11
10
  from typing import List, Optional
12
11
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -19,92 +18,28 @@ class OutputAzureEventhubType(str, Enum):
19
18
  class OutputAzureEventhubAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
20
19
  r"""Control the number of required acknowledgments"""
21
20
 
22
- # Leader
23
21
  ONE = 1
24
- # None
25
22
  ZERO = 0
26
- # All
27
23
  MINUS_1 = -1
28
24
 
29
25
 
30
26
  class OutputAzureEventhubRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
31
27
  r"""Format to use to serialize events before writing to the Event Hubs Kafka brokers"""
32
28
 
33
- # JSON
34
29
  JSON = "json"
35
- # Field _raw
36
30
  RAW = "raw"
37
31
 
38
32
 
39
- class OutputAzureEventhubAuthTypeAuthenticationMethod(
40
- str, Enum, metaclass=utils.OpenEnumMeta
41
- ):
42
- r"""Enter password directly, or select a stored secret"""
43
-
44
- MANUAL = "manual"
45
- SECRET = "secret"
46
-
47
-
48
33
  class OutputAzureEventhubSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
49
- # PLAIN
50
34
  PLAIN = "plain"
51
- # OAUTHBEARER
52
35
  OAUTHBEARER = "oauthbearer"
53
36
 
54
37
 
55
- class OutputAzureEventhubClientSecretAuthTypeAuthenticationMethod(
56
- str, Enum, metaclass=utils.OpenEnumMeta
57
- ):
58
- MANUAL = "manual"
59
- SECRET = "secret"
60
- CERTIFICATE = "certificate"
61
-
62
-
63
- class OutputAzureEventhubMicrosoftEntraIDAuthenticationEndpoint(
64
- str, Enum, metaclass=utils.OpenEnumMeta
65
- ):
66
- r"""Endpoint used to acquire authentication tokens from Azure"""
67
-
68
- HTTPS_LOGIN_MICROSOFTONLINE_COM = "https://login.microsoftonline.com"
69
- HTTPS_LOGIN_MICROSOFTONLINE_US = "https://login.microsoftonline.us"
70
- HTTPS_LOGIN_PARTNER_MICROSOFTONLINE_CN = "https://login.partner.microsoftonline.cn"
71
-
72
-
73
38
  class OutputAzureEventhubAuthenticationTypedDict(TypedDict):
74
39
  r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
75
40
 
76
41
  disabled: NotRequired[bool]
77
- auth_type: NotRequired[OutputAzureEventhubAuthTypeAuthenticationMethod]
78
- r"""Enter password directly, or select a stored secret"""
79
- password: NotRequired[str]
80
- r"""Connection-string primary key, or connection-string secondary key, from the Event Hubs workspace"""
81
- text_secret: NotRequired[str]
82
- r"""Select or create a stored text secret"""
83
42
  mechanism: NotRequired[OutputAzureEventhubSASLMechanism]
84
- username: NotRequired[str]
85
- r"""The username for authentication. For Event Hubs, this should always be $ConnectionString."""
86
- client_secret_auth_type: NotRequired[
87
- OutputAzureEventhubClientSecretAuthTypeAuthenticationMethod
88
- ]
89
- client_secret: NotRequired[str]
90
- r"""client_secret to pass in the OAuth request parameter"""
91
- client_text_secret: NotRequired[str]
92
- r"""Select or create a stored text secret"""
93
- certificate_name: NotRequired[str]
94
- r"""Select or create a stored certificate"""
95
- cert_path: NotRequired[str]
96
- priv_key_path: NotRequired[str]
97
- passphrase: NotRequired[str]
98
- oauth_endpoint: NotRequired[
99
- OutputAzureEventhubMicrosoftEntraIDAuthenticationEndpoint
100
- ]
101
- r"""Endpoint used to acquire authentication tokens from Azure"""
102
- client_id: NotRequired[str]
103
- r"""client_id to pass in the OAuth request parameter"""
104
- tenant_id: NotRequired[str]
105
- r"""Directory ID (tenant identifier) in Azure Active Directory"""
106
- scope: NotRequired[str]
107
- r"""Scope to pass in the OAuth request parameter"""
108
43
 
109
44
 
110
45
  class OutputAzureEventhubAuthentication(BaseModel):
@@ -112,116 +47,11 @@ class OutputAzureEventhubAuthentication(BaseModel):
112
47
 
113
48
  disabled: Optional[bool] = False
114
49
 
115
- auth_type: Annotated[
116
- Annotated[
117
- Optional[OutputAzureEventhubAuthTypeAuthenticationMethod],
118
- PlainValidator(validate_open_enum(False)),
119
- ],
120
- pydantic.Field(alias="authType"),
121
- ] = OutputAzureEventhubAuthTypeAuthenticationMethod.MANUAL
122
- r"""Enter password directly, or select a stored secret"""
123
-
124
- password: Optional[str] = None
125
- r"""Connection-string primary key, or connection-string secondary key, from the Event Hubs workspace"""
126
-
127
- text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
128
- r"""Select or create a stored text secret"""
129
-
130
50
  mechanism: Annotated[
131
51
  Optional[OutputAzureEventhubSASLMechanism],
132
52
  PlainValidator(validate_open_enum(False)),
133
53
  ] = OutputAzureEventhubSASLMechanism.PLAIN
134
54
 
135
- username: Optional[str] = "$ConnectionString"
136
- r"""The username for authentication. For Event Hubs, this should always be $ConnectionString."""
137
-
138
- client_secret_auth_type: Annotated[
139
- Annotated[
140
- Optional[OutputAzureEventhubClientSecretAuthTypeAuthenticationMethod],
141
- PlainValidator(validate_open_enum(False)),
142
- ],
143
- pydantic.Field(alias="clientSecretAuthType"),
144
- ] = OutputAzureEventhubClientSecretAuthTypeAuthenticationMethod.MANUAL
145
-
146
- client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
147
- r"""client_secret to pass in the OAuth request parameter"""
148
-
149
- client_text_secret: Annotated[
150
- Optional[str], pydantic.Field(alias="clientTextSecret")
151
- ] = None
152
- r"""Select or create a stored text secret"""
153
-
154
- certificate_name: Annotated[
155
- Optional[str], pydantic.Field(alias="certificateName")
156
- ] = None
157
- r"""Select or create a stored certificate"""
158
-
159
- cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
160
-
161
- priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
162
-
163
- passphrase: Optional[str] = None
164
-
165
- oauth_endpoint: Annotated[
166
- Annotated[
167
- Optional[OutputAzureEventhubMicrosoftEntraIDAuthenticationEndpoint],
168
- PlainValidator(validate_open_enum(False)),
169
- ],
170
- pydantic.Field(alias="oauthEndpoint"),
171
- ] = OutputAzureEventhubMicrosoftEntraIDAuthenticationEndpoint.HTTPS_LOGIN_MICROSOFTONLINE_COM
172
- r"""Endpoint used to acquire authentication tokens from Azure"""
173
-
174
- client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
175
- r"""client_id to pass in the OAuth request parameter"""
176
-
177
- tenant_id: Annotated[Optional[str], pydantic.Field(alias="tenantId")] = None
178
- r"""Directory ID (tenant identifier) in Azure Active Directory"""
179
-
180
- scope: Optional[str] = None
181
- r"""Scope to pass in the OAuth request parameter"""
182
-
183
- @field_serializer("auth_type")
184
- def serialize_auth_type(self, value):
185
- if isinstance(value, str):
186
- try:
187
- return models.OutputAzureEventhubAuthTypeAuthenticationMethod(value)
188
- except ValueError:
189
- return value
190
- return value
191
-
192
- @field_serializer("mechanism")
193
- def serialize_mechanism(self, value):
194
- if isinstance(value, str):
195
- try:
196
- return models.OutputAzureEventhubSASLMechanism(value)
197
- except ValueError:
198
- return value
199
- return value
200
-
201
- @field_serializer("client_secret_auth_type")
202
- def serialize_client_secret_auth_type(self, value):
203
- if isinstance(value, str):
204
- try:
205
- return (
206
- models.OutputAzureEventhubClientSecretAuthTypeAuthenticationMethod(
207
- value
208
- )
209
- )
210
- except ValueError:
211
- return value
212
- return value
213
-
214
- @field_serializer("oauth_endpoint")
215
- def serialize_oauth_endpoint(self, value):
216
- if isinstance(value, str):
217
- try:
218
- return models.OutputAzureEventhubMicrosoftEntraIDAuthenticationEndpoint(
219
- value
220
- )
221
- except ValueError:
222
- return value
223
- return value
224
-
225
55
 
226
56
  class OutputAzureEventhubTLSSettingsClientSideTypedDict(TypedDict):
227
57
  disabled: NotRequired[bool]
@@ -241,43 +71,33 @@ class OutputAzureEventhubTLSSettingsClientSide(BaseModel):
241
71
  class OutputAzureEventhubBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
242
72
  r"""How to handle events when all receivers are exerting backpressure"""
243
73
 
244
- # Block
245
74
  BLOCK = "block"
246
- # Drop
247
75
  DROP = "drop"
248
- # Persistent Queue
249
76
  QUEUE = "queue"
250
77
 
251
78
 
252
- class OutputAzureEventhubMode(str, Enum, metaclass=utils.OpenEnumMeta):
253
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
254
-
255
- # Error
256
- ERROR = "error"
257
- # Backpressure
258
- ALWAYS = "always"
259
- # Always On
260
- BACKPRESSURE = "backpressure"
261
-
262
-
263
79
  class OutputAzureEventhubCompression(str, Enum, metaclass=utils.OpenEnumMeta):
264
80
  r"""Codec to use to compress the persisted data"""
265
81
 
266
- # None
267
82
  NONE = "none"
268
- # Gzip
269
83
  GZIP = "gzip"
270
84
 
271
85
 
272
86
  class OutputAzureEventhubQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
273
87
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
274
88
 
275
- # Block
276
89
  BLOCK = "block"
277
- # Drop new data
278
90
  DROP = "drop"
279
91
 
280
92
 
93
+ class OutputAzureEventhubMode(str, Enum, metaclass=utils.OpenEnumMeta):
94
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
95
+
96
+ ERROR = "error"
97
+ BACKPRESSURE = "backpressure"
98
+ ALWAYS = "always"
99
+
100
+
281
101
  class OutputAzureEventhubPqControlsTypedDict(TypedDict):
282
102
  pass
283
103
 
@@ -334,16 +154,6 @@ class OutputAzureEventhubTypedDict(TypedDict):
334
154
  on_backpressure: NotRequired[OutputAzureEventhubBackpressureBehavior]
335
155
  r"""How to handle events when all receivers are exerting backpressure"""
336
156
  description: NotRequired[str]
337
- pq_strict_ordering: NotRequired[bool]
338
- r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
339
- pq_rate_per_sec: NotRequired[float]
340
- r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
341
- pq_mode: NotRequired[OutputAzureEventhubMode]
342
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
343
- pq_max_buffer_size: NotRequired[float]
344
- r"""The maximum number of events to hold in memory before writing the events to disk"""
345
- pq_max_backpressure_sec: NotRequired[float]
346
- r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
347
157
  pq_max_file_size: NotRequired[str]
348
158
  r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
349
159
  pq_max_size: NotRequired[str]
@@ -354,6 +164,8 @@ class OutputAzureEventhubTypedDict(TypedDict):
354
164
  r"""Codec to use to compress the persisted data"""
355
165
  pq_on_backpressure: NotRequired[OutputAzureEventhubQueueFullBehavior]
356
166
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
167
+ pq_mode: NotRequired[OutputAzureEventhubMode]
168
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
357
169
  pq_controls: NotRequired[OutputAzureEventhubPqControlsTypedDict]
358
170
 
359
171
 
@@ -463,34 +275,6 @@ class OutputAzureEventhub(BaseModel):
463
275
 
464
276
  description: Optional[str] = None
465
277
 
466
- pq_strict_ordering: Annotated[
467
- Optional[bool], pydantic.Field(alias="pqStrictOrdering")
468
- ] = True
469
- r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
470
-
471
- pq_rate_per_sec: Annotated[
472
- Optional[float], pydantic.Field(alias="pqRatePerSec")
473
- ] = 0
474
- r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
475
-
476
- pq_mode: Annotated[
477
- Annotated[
478
- Optional[OutputAzureEventhubMode], PlainValidator(validate_open_enum(False))
479
- ],
480
- pydantic.Field(alias="pqMode"),
481
- ] = OutputAzureEventhubMode.ERROR
482
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
483
-
484
- pq_max_buffer_size: Annotated[
485
- Optional[float], pydantic.Field(alias="pqMaxBufferSize")
486
- ] = 42
487
- r"""The maximum number of events to hold in memory before writing the events to disk"""
488
-
489
- pq_max_backpressure_sec: Annotated[
490
- Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
491
- ] = 30
492
- r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
493
-
494
278
  pq_max_file_size: Annotated[
495
279
  Optional[str], pydantic.Field(alias="pqMaxFileSize")
496
280
  ] = "1 MB"
@@ -522,60 +306,14 @@ class OutputAzureEventhub(BaseModel):
522
306
  ] = OutputAzureEventhubQueueFullBehavior.BLOCK
523
307
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
524
308
 
309
+ pq_mode: Annotated[
310
+ Annotated[
311
+ Optional[OutputAzureEventhubMode], PlainValidator(validate_open_enum(False))
312
+ ],
313
+ pydantic.Field(alias="pqMode"),
314
+ ] = OutputAzureEventhubMode.ERROR
315
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
316
+
525
317
  pq_controls: Annotated[
526
318
  Optional[OutputAzureEventhubPqControls], pydantic.Field(alias="pqControls")
527
319
  ] = None
528
-
529
- @field_serializer("ack")
530
- def serialize_ack(self, value):
531
- if isinstance(value, str):
532
- try:
533
- return models.OutputAzureEventhubAcknowledgments(value)
534
- except ValueError:
535
- return value
536
- return value
537
-
538
- @field_serializer("format_")
539
- def serialize_format_(self, value):
540
- if isinstance(value, str):
541
- try:
542
- return models.OutputAzureEventhubRecordDataFormat(value)
543
- except ValueError:
544
- return value
545
- return value
546
-
547
- @field_serializer("on_backpressure")
548
- def serialize_on_backpressure(self, value):
549
- if isinstance(value, str):
550
- try:
551
- return models.OutputAzureEventhubBackpressureBehavior(value)
552
- except ValueError:
553
- return value
554
- return value
555
-
556
- @field_serializer("pq_mode")
557
- def serialize_pq_mode(self, value):
558
- if isinstance(value, str):
559
- try:
560
- return models.OutputAzureEventhubMode(value)
561
- except ValueError:
562
- return value
563
- return value
564
-
565
- @field_serializer("pq_compress")
566
- def serialize_pq_compress(self, value):
567
- if isinstance(value, str):
568
- try:
569
- return models.OutputAzureEventhubCompression(value)
570
- except ValueError:
571
- return value
572
- return value
573
-
574
- @field_serializer("pq_on_backpressure")
575
- def serialize_pq_on_backpressure(self, value):
576
- if isinstance(value, str):
577
- try:
578
- return models.OutputAzureEventhubQueueFullBehavior(value)
579
- except ValueError:
580
- return value
581
- return value
@@ -1,12 +1,11 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import models, utils
4
+ from cribl_control_plane import utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
- from pydantic import field_serializer
10
9
  from pydantic.functional_validators import PlainValidator
11
10
  from typing import List, Optional
12
11
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,11 +29,8 @@ class OutputAzureLogsExtraHTTPHeader(BaseModel):
30
29
  class OutputAzureLogsFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
31
30
  r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
32
31
 
33
- # Payload
34
32
  PAYLOAD = "payload"
35
- # Payload + Headers
36
33
  PAYLOAD_AND_HEADERS = "payloadAndHeaders"
37
- # None
38
34
  NONE = "none"
39
35
 
40
36
 
@@ -95,11 +91,8 @@ class OutputAzureLogsTimeoutRetrySettings(BaseModel):
95
91
  class OutputAzureLogsBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
96
92
  r"""How to handle events when all receivers are exerting backpressure"""
97
93
 
98
- # Block
99
94
  BLOCK = "block"
100
- # Drop
101
95
  DROP = "drop"
102
- # Persistent Queue
103
96
  QUEUE = "queue"
104
97
 
105
98
 
@@ -110,35 +103,28 @@ class OutputAzureLogsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMet
110
103
  SECRET = "secret"
111
104
 
112
105
 
113
- class OutputAzureLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
114
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
115
-
116
- # Error
117
- ERROR = "error"
118
- # Backpressure
119
- ALWAYS = "always"
120
- # Always On
121
- BACKPRESSURE = "backpressure"
122
-
123
-
124
106
  class OutputAzureLogsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
125
107
  r"""Codec to use to compress the persisted data"""
126
108
 
127
- # None
128
109
  NONE = "none"
129
- # Gzip
130
110
  GZIP = "gzip"
131
111
 
132
112
 
133
113
  class OutputAzureLogsQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
134
114
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
135
115
 
136
- # Block
137
116
  BLOCK = "block"
138
- # Drop new data
139
117
  DROP = "drop"
140
118
 
141
119
 
120
+ class OutputAzureLogsMode(str, Enum, metaclass=utils.OpenEnumMeta):
121
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
122
+
123
+ ERROR = "error"
124
+ BACKPRESSURE = "backpressure"
125
+ ALWAYS = "always"
126
+
127
+
142
128
  class OutputAzureLogsPqControlsTypedDict(TypedDict):
143
129
  pass
144
130
 
@@ -201,16 +187,6 @@ class OutputAzureLogsTypedDict(TypedDict):
201
187
  auth_type: NotRequired[OutputAzureLogsAuthenticationMethod]
202
188
  r"""Enter workspace ID and workspace key directly, or select a stored secret"""
203
189
  description: NotRequired[str]
204
- pq_strict_ordering: NotRequired[bool]
205
- r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
206
- pq_rate_per_sec: NotRequired[float]
207
- r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
208
- pq_mode: NotRequired[OutputAzureLogsMode]
209
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
210
- pq_max_buffer_size: NotRequired[float]
211
- r"""The maximum number of events to hold in memory before writing the events to disk"""
212
- pq_max_backpressure_sec: NotRequired[float]
213
- r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
214
190
  pq_max_file_size: NotRequired[str]
215
191
  r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
216
192
  pq_max_size: NotRequired[str]
@@ -221,6 +197,8 @@ class OutputAzureLogsTypedDict(TypedDict):
221
197
  r"""Codec to use to compress the persisted data"""
222
198
  pq_on_backpressure: NotRequired[OutputAzureLogsQueueFullBehavior]
223
199
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
200
+ pq_mode: NotRequired[OutputAzureLogsMode]
201
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
224
202
  pq_controls: NotRequired[OutputAzureLogsPqControlsTypedDict]
225
203
  workspace_id: NotRequired[str]
226
204
  r"""Azure Log Analytics Workspace ID. See Azure Dashboard Workspace > Advanced settings."""
@@ -353,34 +331,6 @@ class OutputAzureLogs(BaseModel):
353
331
 
354
332
  description: Optional[str] = None
355
333
 
356
- pq_strict_ordering: Annotated[
357
- Optional[bool], pydantic.Field(alias="pqStrictOrdering")
358
- ] = True
359
- r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
360
-
361
- pq_rate_per_sec: Annotated[
362
- Optional[float], pydantic.Field(alias="pqRatePerSec")
363
- ] = 0
364
- r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
365
-
366
- pq_mode: Annotated[
367
- Annotated[
368
- Optional[OutputAzureLogsMode], PlainValidator(validate_open_enum(False))
369
- ],
370
- pydantic.Field(alias="pqMode"),
371
- ] = OutputAzureLogsMode.ERROR
372
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
373
-
374
- pq_max_buffer_size: Annotated[
375
- Optional[float], pydantic.Field(alias="pqMaxBufferSize")
376
- ] = 42
377
- r"""The maximum number of events to hold in memory before writing the events to disk"""
378
-
379
- pq_max_backpressure_sec: Annotated[
380
- Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
381
- ] = 30
382
- r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
383
-
384
334
  pq_max_file_size: Annotated[
385
335
  Optional[str], pydantic.Field(alias="pqMaxFileSize")
386
336
  ] = "1 MB"
@@ -412,6 +362,14 @@ class OutputAzureLogs(BaseModel):
412
362
  ] = OutputAzureLogsQueueFullBehavior.BLOCK
413
363
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
414
364
 
365
+ pq_mode: Annotated[
366
+ Annotated[
367
+ Optional[OutputAzureLogsMode], PlainValidator(validate_open_enum(False))
368
+ ],
369
+ pydantic.Field(alias="pqMode"),
370
+ ] = OutputAzureLogsMode.ERROR
371
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
372
+
415
373
  pq_controls: Annotated[
416
374
  Optional[OutputAzureLogsPqControls], pydantic.Field(alias="pqControls")
417
375
  ] = None
@@ -426,57 +384,3 @@ class OutputAzureLogs(BaseModel):
426
384
  None
427
385
  )
428
386
  r"""Select or create a stored secret that references your access key and secret key"""
429
-
430
- @field_serializer("failed_request_logging_mode")
431
- def serialize_failed_request_logging_mode(self, value):
432
- if isinstance(value, str):
433
- try:
434
- return models.OutputAzureLogsFailedRequestLoggingMode(value)
435
- except ValueError:
436
- return value
437
- return value
438
-
439
- @field_serializer("on_backpressure")
440
- def serialize_on_backpressure(self, value):
441
- if isinstance(value, str):
442
- try:
443
- return models.OutputAzureLogsBackpressureBehavior(value)
444
- except ValueError:
445
- return value
446
- return value
447
-
448
- @field_serializer("auth_type")
449
- def serialize_auth_type(self, value):
450
- if isinstance(value, str):
451
- try:
452
- return models.OutputAzureLogsAuthenticationMethod(value)
453
- except ValueError:
454
- return value
455
- return value
456
-
457
- @field_serializer("pq_mode")
458
- def serialize_pq_mode(self, value):
459
- if isinstance(value, str):
460
- try:
461
- return models.OutputAzureLogsMode(value)
462
- except ValueError:
463
- return value
464
- return value
465
-
466
- @field_serializer("pq_compress")
467
- def serialize_pq_compress(self, value):
468
- if isinstance(value, str):
469
- try:
470
- return models.OutputAzureLogsCompression(value)
471
- except ValueError:
472
- return value
473
- return value
474
-
475
- @field_serializer("pq_on_backpressure")
476
- def serialize_pq_on_backpressure(self, value):
477
- if isinstance(value, str):
478
- try:
479
- return models.OutputAzureLogsQueueFullBehavior(value)
480
- except ValueError:
481
- return value
482
- return value