cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputprometheus.py +34 -138

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputPrometheusConnection(BaseModel):
  class InputPrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

- # Smart
  SMART = "smart"
- # Always On
  ALWAYS = "always"


  class InputPrometheusCompression(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Codec to use to compress the persisted data"""

- # None
  NONE = "none"
- # Gzip
  GZIP = "gzip"


@@ -107,33 +102,12 @@ class InputPrometheusPq(BaseModel):
  Optional[InputPrometheusPqControls], pydantic.Field(alias="pqControls")
  ] = None

- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusMode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("compress")
- def serialize_compress(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusCompression(value)
- except ValueError:
- return value
- return value
-

  class InputPrometheusDiscoveryType(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Target discovery mechanism. Use static to manually enter a list of targets."""

- # Static
  STATIC = "static"
- # DNS
  DNS = "dns"
- # AWS EC2
  EC2 = "ec2"


@@ -183,19 +157,6 @@ class MetricsProtocol(str, Enum, metaclass=utils.OpenEnumMeta):
  HTTPS = "https"


- class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
- str, Enum, metaclass=utils.OpenEnumMeta
- ):
- r"""AWS authentication method. Choose Auto to use IAM roles."""
-
- # Auto
- AUTO = "auto"
- # Manual
- MANUAL = "manual"
- # Secret Key pair
- SECRET = "secret"
-
-
  class InputPrometheusSearchFilterTypedDict(TypedDict):
  name: str
  r"""Search filter attribute name, see: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstances.html for more information. Attributes can be manually entered if not present in the drop down list"""
@@ -211,6 +172,16 @@ class InputPrometheusSearchFilter(BaseModel):
  r"""Search Filter Values, if empty only \"running\" EC2 instances will be returned"""


+ class InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
+ str, Enum, metaclass=utils.OpenEnumMeta
+ ):
+ r"""AWS authentication method. Choose Auto to use IAM roles."""
+
+ AUTO = "auto"
+ MANUAL = "manual"
+ SECRET = "secret"
+
+
  class InputPrometheusSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Signature version to use for signing EC2 requests"""

@@ -263,27 +234,24 @@ class InputPrometheusTypedDict(TypedDict):
  description: NotRequired[str]
  target_list: NotRequired[List[str]]
  r"""List of Prometheus targets to pull metrics from. Values can be in URL or host[:port] format. For example: http://localhost:9090/metrics, localhost:9090, or localhost. In cases where just host[:port] is specified, the endpoint will resolve to 'http://host[:port]/metrics'."""
- record_type: NotRequired[InputPrometheusRecordType]
- r"""DNS Record type to resolve"""
- scrape_port: NotRequired[float]
- r"""The port number in the metrics URL for discovered targets."""
  name_list: NotRequired[List[str]]
  r"""List of DNS names to resolve"""
+ record_type: NotRequired[InputPrometheusRecordType]
+ r"""DNS Record type to resolve"""
  scrape_protocol: NotRequired[MetricsProtocol]
  r"""Protocol to use when collecting metrics"""
  scrape_path: NotRequired[str]
  r"""Path to use when collecting metrics from discovered targets"""
- aws_authentication_method: NotRequired[
- InputPrometheusAwsAuthenticationMethodAuthenticationMethod
- ]
- r"""AWS authentication method. Choose Auto to use IAM roles."""
- aws_api_key: NotRequired[str]
- aws_secret: NotRequired[str]
- r"""Select or create a stored secret that references your access key and secret key"""
  use_public_ip: NotRequired[bool]
  r"""Use public IP address for discovered targets. Set to false if the private IP address should be used."""
+ scrape_port: NotRequired[float]
+ r"""The port number in the metrics URL for discovered targets."""
  search_filter: NotRequired[List[InputPrometheusSearchFilterTypedDict]]
  r"""EC2 Instance Search Filter"""
+ aws_authentication_method: NotRequired[
+ InputPrometheusAwsAuthenticationMethodAuthenticationMethod
+ ]
+ r"""AWS authentication method. Choose Auto to use IAM roles."""
  aws_secret_key: NotRequired[str]
  region: NotRequired[str]
  r"""Region where the EC2 is located"""
@@ -409,6 +377,9 @@ class InputPrometheus(BaseModel):
  )
  r"""List of Prometheus targets to pull metrics from. Values can be in URL or host[:port] format. For example: http://localhost:9090/metrics, localhost:9090, or localhost. In cases where just host[:port] is specified, the endpoint will resolve to 'http://host[:port]/metrics'."""

+ name_list: Annotated[Optional[List[str]], pydantic.Field(alias="nameList")] = None
+ r"""List of DNS names to resolve"""
+
  record_type: Annotated[
  Annotated[
  Optional[InputPrometheusRecordType],
@@ -418,12 +389,6 @@ class InputPrometheus(BaseModel):
  ] = InputPrometheusRecordType.SRV
  r"""DNS Record type to resolve"""

- scrape_port: Annotated[Optional[float], pydantic.Field(alias="scrapePort")] = 9090
- r"""The port number in the metrics URL for discovered targets."""
-
- name_list: Annotated[Optional[List[str]], pydantic.Field(alias="nameList")] = None
- r"""List of DNS names to resolve"""
-
  scrape_protocol: Annotated[
  Annotated[Optional[MetricsProtocol], PlainValidator(validate_open_enum(False))],
  pydantic.Field(alias="scrapeProtocol"),
@@ -435,29 +400,27 @@ class InputPrometheus(BaseModel):
  )
  r"""Path to use when collecting metrics from discovered targets"""

- aws_authentication_method: Annotated[
- Annotated[
- Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
- PlainValidator(validate_open_enum(False)),
- ],
- pydantic.Field(alias="awsAuthenticationMethod"),
- ] = InputPrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
- r"""AWS authentication method. Choose Auto to use IAM roles."""
-
- aws_api_key: Annotated[Optional[str], pydantic.Field(alias="awsApiKey")] = None
-
- aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
- r"""Select or create a stored secret that references your access key and secret key"""
-
  use_public_ip: Annotated[Optional[bool], pydantic.Field(alias="usePublicIp")] = True
  r"""Use public IP address for discovered targets. Set to false if the private IP address should be used."""

+ scrape_port: Annotated[Optional[float], pydantic.Field(alias="scrapePort")] = 9090
+ r"""The port number in the metrics URL for discovered targets."""
+
  search_filter: Annotated[
  Optional[List[InputPrometheusSearchFilter]],
  pydantic.Field(alias="searchFilter"),
  ] = None
  r"""EC2 Instance Search Filter"""

+ aws_authentication_method: Annotated[
+ Annotated[
+ Optional[InputPrometheusAwsAuthenticationMethodAuthenticationMethod],
+ PlainValidator(validate_open_enum(False)),
+ ],
+ pydantic.Field(alias="awsAuthenticationMethod"),
+ ] = InputPrometheusAwsAuthenticationMethodAuthenticationMethod.AUTO
+ r"""AWS authentication method. Choose Auto to use IAM roles."""
+
  aws_secret_key: Annotated[Optional[str], pydantic.Field(alias="awsSecretKey")] = (
  None
  )
@@ -512,70 +475,3 @@ class InputPrometheus(BaseModel):
  Optional[str], pydantic.Field(alias="credentialsSecret")
  ] = None
  r"""Select or create a secret that references your credentials"""
-
- @field_serializer("discovery_type")
- def serialize_discovery_type(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusDiscoveryType(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("log_level")
- def serialize_log_level(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusLogLevel(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("auth_type")
- def serialize_auth_type(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusAuthTypeAuthenticationMethod(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("record_type")
- def serialize_record_type(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRecordType(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("scrape_protocol")
- def serialize_scrape_protocol(self, value):
- if isinstance(value, str):
- try:
- return models.MetricsProtocol(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("aws_authentication_method")
- def serialize_aws_authentication_method(self, value):
- if isinstance(value, str):
- try:
- return (
- models.InputPrometheusAwsAuthenticationMethodAuthenticationMethod(
- value
- )
- )
- except ValueError:
- return value
- return value
-
- @field_serializer("signature_version")
- def serialize_signature_version(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusSignatureVersion(value)
- except ValueError:
- return value
- return value
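
Beyond the serializer and comment cleanup, the inputprometheus.py hunks drop the `aws_api_key` and `aws_secret` fields from `InputPrometheusTypedDict` and `InputPrometheus`, while `aws_authentication_method`, `aws_secret_key`, and `region` survive the reordering. A minimal sketch of the key-level impact for callers building the typed dict (the literal values below are placeholders, not real credentials):

# Illustrative only: EC2-discovery keys accepted before and after this release.
old_ec2_auth = {
    "aws_authentication_method": "secret",
    "aws_api_key": "example-key-id",        # removed in 0.3.0a1
    "aws_secret": "example-stored-secret",  # removed in 0.3.0a1
}

new_ec2_auth = {
    "aws_authentication_method": "secret",
    "aws_secret_key": "example-secret-key",  # still present in 0.3.0a1
    "region": "us-east-1",
}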
cribl_control_plane/models/inputprometheusrw.py +17 -71

@@ -1,14 +1,13 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
- from typing import List, Optional
+ from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict


@@ -30,18 +29,14 @@ class InputPrometheusRwConnection(BaseModel):
  class InputPrometheusRwMode(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

- # Smart
  SMART = "smart"
- # Always On
  ALWAYS = "always"


  class InputPrometheusRwCompression(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Codec to use to compress the persisted data"""

- # None
  NONE = "none"
- # Gzip
  GZIP = "gzip"


@@ -108,24 +103,6 @@ class InputPrometheusRwPq(BaseModel):
  Optional[InputPrometheusRwPqControls], pydantic.Field(alias="pqControls")
  ] = None

- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRwMode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("compress")
- def serialize_compress(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRwCompression(value)
- except ValueError:
- return value
- return value
-

  class InputPrometheusRwMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
  TL_SV1 = "TLSv1"
@@ -143,12 +120,6 @@ class InputPrometheusRwMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta

  class InputPrometheusRwTLSSettingsServerSideTypedDict(TypedDict):
  disabled: NotRequired[bool]
- request_cert: NotRequired[bool]
- r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
- reject_unauthorized: NotRequired[bool]
- r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
- common_name_regex: NotRequired[str]
- r"""Regex matching allowable common names in peer certificates' subject attribute"""
  certificate_name: NotRequired[str]
  r"""The name of the predefined certificate"""
  priv_key_path: NotRequired[str]
@@ -159,6 +130,10 @@ class InputPrometheusRwTLSSettingsServerSideTypedDict(TypedDict):
  r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
  ca_path: NotRequired[str]
  r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+ request_cert: NotRequired[bool]
+ r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+ reject_unauthorized: NotRequired[Any]
+ common_name_regex: NotRequired[Any]
  min_version: NotRequired[InputPrometheusRwMinimumTLSVersion]
  max_version: NotRequired[InputPrometheusRwMaximumTLSVersion]

@@ -166,19 +141,6 @@ class InputPrometheusRwTLSSettingsServerSideTypedDict(TypedDict):
  class InputPrometheusRwTLSSettingsServerSide(BaseModel):
  disabled: Optional[bool] = True

- request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
- r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
-
- reject_unauthorized: Annotated[
- Optional[bool], pydantic.Field(alias="rejectUnauthorized")
- ] = True
- r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's)"""
-
- common_name_regex: Annotated[
- Optional[str], pydantic.Field(alias="commonNameRegex")
- ] = "/.*/"
- r"""Regex matching allowable common names in peer certificates' subject attribute"""
-
  certificate_name: Annotated[
  Optional[str], pydantic.Field(alias="certificateName")
  ] = None
@@ -196,6 +158,17 @@ class InputPrometheusRwTLSSettingsServerSide(BaseModel):
  ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
  r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""

+ request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+ r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+ reject_unauthorized: Annotated[
+ Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+ ] = None
+
+ common_name_regex: Annotated[
+ Optional[Any], pydantic.Field(alias="commonNameRegex")
+ ] = None
+
  min_version: Annotated[
  Annotated[
  Optional[InputPrometheusRwMinimumTLSVersion],
@@ -212,24 +185,6 @@ class InputPrometheusRwTLSSettingsServerSide(BaseModel):
  pydantic.Field(alias="maxVersion"),
  ] = None

- @field_serializer("min_version")
- def serialize_min_version(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRwMinimumTLSVersion(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("max_version")
- def serialize_max_version(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRwMaximumTLSVersion(value)
- except ValueError:
- return value
- return value
-

  class InputPrometheusRwAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Remote Write authentication type"""
@@ -526,12 +481,3 @@ class InputPrometheusRw(BaseModel):
  pydantic.Field(alias="oauthHeaders"),
  ] = None
  r"""Additional headers to send in the OAuth login request. @{product} will automatically add the content-type header 'application/x-www-form-urlencoded' when sending this request."""
-
- @field_serializer("auth_type")
- def serialize_auth_type(self, value):
- if isinstance(value, str):
- try:
- return models.InputPrometheusRwAuthenticationType(value)
- except ValueError:
- return value
- return value
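
The TLS server-side change here is more than a field reorder: `reject_unauthorized` and `common_name_regex` lose their typed defaults (`True` and `"/.*/"`) and become untyped `Any` fields that default to `None`. A standalone pydantic sketch of that shift, using stand-in class names rather than the generated `InputPrometheusRwTLSSettingsServerSide`:

# Illustrative only: before/after shape of the two TLS fields changed above.
from typing import Any, Optional

from pydantic import BaseModel, Field


class TLSServerSideBefore(BaseModel):  # 0.2.1rc7 shape
    reject_unauthorized: Optional[bool] = Field(True, alias="rejectUnauthorized")
    common_name_regex: Optional[str] = Field("/.*/", alias="commonNameRegex")


class TLSServerSideAfter(BaseModel):  # 0.3.0a1 shape
    reject_unauthorized: Optional[Any] = Field(None, alias="rejectUnauthorized")
    common_name_regex: Optional[Any] = Field(None, alias="commonNameRegex")


# Old shape: omitted fields fell back to the typed defaults.
print(TLSServerSideBefore().model_dump())

# New shape: omitted fields are simply None and values are no longer type-checked,
# so callers who relied on the old defaults should now set them explicitly.
print(TLSServerSideAfter(rejectUnauthorized=True, commonNameRegex="/.*/").model_dump())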
cribl_control_plane/models/inputrawudp.py +1 -24

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputRawUDPConnection(BaseModel):
  class InputRawUDPMode(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

- # Smart
  SMART = "smart"
- # Always On
  ALWAYS = "always"


  class InputRawUDPCompression(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Codec to use to compress the persisted data"""

- # None
  NONE = "none"
- # Gzip
  GZIP = "gzip"


@@ -107,24 +102,6 @@ class InputRawUDPPq(BaseModel):
  Optional[InputRawUDPPqControls], pydantic.Field(alias="pqControls")
  ] = None

- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.InputRawUDPMode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("compress")
- def serialize_compress(self, value):
- if isinstance(value, str):
- try:
- return models.InputRawUDPCompression(value)
- except ValueError:
- return value
- return value
-

  class InputRawUDPMetadatumTypedDict(TypedDict):
  name: str
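
The inputrawudp.py diff is nothing but this release's recurring pattern: the `models` and `field_serializer` imports go away, the human-readable enum comments go away, and the two `@field_serializer` hooks on the PQ settings are deleted. Each removed hook followed the same coercion logic; a self-contained sketch with plain pydantic and a stand-in enum (not the generated `InputRawUDPMode`):

# Illustrative only: the serializer pattern deleted throughout 0.3.0a1.
from enum import Enum

from pydantic import BaseModel, field_serializer


class PQMode(str, Enum):  # stand-in for generated enums such as InputRawUDPMode
    SMART = "smart"
    ALWAYS = "always"


class Pq(BaseModel):
    mode: str = "smart"

    @field_serializer("mode")
    def serialize_mode(self, value):
        # Known strings are coerced back to the enum member at dump time;
        # unrecognized strings pass through untouched.
        if isinstance(value, str):
            try:
                return PQMode(value)
            except ValueError:
                return value
        return value


print(Pq(mode="always").model_dump())  # dumps the enum member PQMode.ALWAYS
print(Pq(mode="custom").model_dump())  # dumps the plain string "custom"

With these hooks gone, dumped models keep whatever value the field holds; the open-enum validators (`PlainValidator(validate_open_enum(False))`), which the hunks leave in place, now carry the enum handling on their own.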
cribl_control_plane/models/inputs3.py +1 -45

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputS3Connection(BaseModel):
  class InputS3Mode(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

- # Smart
  SMART = "smart"
- # Always On
  ALWAYS = "always"


  class InputS3Compression(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Codec to use to compress the persisted data"""

- # None
  NONE = "none"
- # Gzip
  GZIP = "gzip"


@@ -107,33 +102,12 @@ class InputS3Pq(BaseModel):
  Optional[InputS3PqControls], pydantic.Field(alias="pqControls")
  ] = None

- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3Mode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("compress")
- def serialize_compress(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3Compression(value)
- except ValueError:
- return value
- return value
-

  class InputS3AuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""AWS authentication method. Choose Auto to use IAM roles."""

- # Auto
  AUTO = "auto"
- # Manual
  MANUAL = "manual"
- # Secret Key pair
  SECRET = "secret"


@@ -460,21 +434,3 @@ class InputS3(BaseModel):
  Optional[str], pydantic.Field(alias="processedTagValue")
  ] = None
  r"""The value for the S3 object tag applied after processing. This field accepts an expression for dynamic generation."""
-
- @field_serializer("aws_authentication_method")
- def serialize_aws_authentication_method(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3AuthenticationMethod(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("signature_version")
- def serialize_signature_version(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3SignatureVersion(value)
- except ValueError:
- return value
- return value
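
Every enum in these files is declared with `metaclass=utils.OpenEnumMeta` and validated through `PlainValidator(validate_open_enum(False))`, both of which this release keeps. The sketch below shows the general open-enum idea in plain pydantic; `accept_unknown` is a hypothetical stand-in for the SDK's `validate_open_enum` helper, not its actual implementation:

# Illustrative only: an "open enum" field that tolerates values outside the declared members.
from enum import Enum
from typing import Union

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class AuthMethod(str, Enum):  # stand-in for InputS3AuthenticationMethod
    AUTO = "auto"
    MANUAL = "manual"
    SECRET = "secret"


def accept_unknown(value):
    # Hypothetical stand-in for validate_open_enum(False): return the matching
    # enum member when there is one, otherwise keep the raw string.
    try:
        return AuthMethod(value)
    except ValueError:
        return value


class S3Settings(BaseModel):
    aws_authentication_method: Annotated[
        Union[AuthMethod, str], PlainValidator(accept_unknown)
    ] = AuthMethod.AUTO


print(S3Settings(aws_authentication_method="manual").aws_authentication_method)
print(S3Settings(aws_authentication_method="sso").aws_authentication_method)  # unknown value survives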
cribl_control_plane/models/inputs3inventory.py +1 -54

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,18 +29,14 @@ class InputS3InventoryConnection(BaseModel):
  class InputS3InventoryMode(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

- # Smart
  SMART = "smart"
- # Always On
  ALWAYS = "always"


  class InputS3InventoryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Codec to use to compress the persisted data"""

- # None
  NONE = "none"
- # Gzip
  GZIP = "gzip"


@@ -107,33 +102,12 @@ class InputS3InventoryPq(BaseModel):
  Optional[InputS3InventoryPqControls], pydantic.Field(alias="pqControls")
  ] = None

- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3InventoryMode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("compress")
- def serialize_compress(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3InventoryCompression(value)
- except ValueError:
- return value
- return value
-

  class InputS3InventoryAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""AWS authentication method. Choose Auto to use IAM roles."""

- # Auto
  AUTO = "auto"
- # Manual
  MANUAL = "manual"
- # Secret Key pair
  SECRET = "secret"


@@ -484,30 +458,3 @@ class InputS3Inventory(BaseModel):
  Optional[str], pydantic.Field(alias="processedTagValue")
  ] = None
  r"""The value for the S3 object tag applied after processing. This field accepts an expression for dynamic generation."""
-
- @field_serializer("aws_authentication_method")
- def serialize_aws_authentication_method(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3InventoryAuthenticationMethod(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("signature_version")
- def serialize_signature_version(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3InventorySignatureVersion(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("tag_after_processing")
- def serialize_tag_after_processing(self, value):
- if isinstance(value, str):
- try:
- return models.InputS3InventoryTagAfterProcessing(value)
- except ValueError:
- return value
- return value
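
Because these serializers were what converted dumped strings back into enum members, a quick way to gauge the impact on your own configuration payloads is to compare dumps across the upgrade. A hedged sketch (`my_input` is a placeholder for whichever SDK model you actually build; the comparison relies on these being str-based enums, which JSON-encode to their plain string values):

# Illustrative only: compare a model's dumped payload across SDK versions.
import json


def normalize(dump: dict) -> str:
    # str-based enum members and plain strings both JSON-encode to the same
    # string value, so a JSON round-trip makes the two dumps comparable.
    return json.dumps(dump, default=str, sort_keys=True)


# Run once against each installed version and diff the results:
#   with 0.2.1rc7:  old_payload = normalize(my_input.model_dump())
#   with 0.3.0a1:   new_payload = normalize(my_input.model_dump())
#   assert old_payload == new_payload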