cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic. Click here for more details.

Files changed (144)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/errors/healthstatus_error.py +2 -8
  4. cribl_control_plane/models/__init__.py +5347 -115
  5. cribl_control_plane/models/createinputop.py +18216 -2
  6. cribl_control_plane/models/createoutputop.py +18417 -4
  7. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  9. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  12. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  13. cribl_control_plane/models/healthstatus.py +4 -7
  14. cribl_control_plane/models/inputappscope.py +16 -36
  15. cribl_control_plane/models/inputazureblob.py +8 -19
  16. cribl_control_plane/models/inputcollection.py +6 -15
  17. cribl_control_plane/models/inputconfluentcloud.py +22 -45
  18. cribl_control_plane/models/inputcribl.py +6 -13
  19. cribl_control_plane/models/inputcriblhttp.py +12 -27
  20. cribl_control_plane/models/inputcribllakehttp.py +14 -26
  21. cribl_control_plane/models/inputcriblmetrics.py +6 -14
  22. cribl_control_plane/models/inputcribltcp.py +12 -27
  23. cribl_control_plane/models/inputcrowdstrike.py +12 -28
  24. cribl_control_plane/models/inputdatadogagent.py +12 -28
  25. cribl_control_plane/models/inputdatagen.py +6 -13
  26. cribl_control_plane/models/inputedgeprometheus.py +33 -64
  27. cribl_control_plane/models/inputelastic.py +18 -44
  28. cribl_control_plane/models/inputeventhub.py +10 -19
  29. cribl_control_plane/models/inputexec.py +8 -16
  30. cribl_control_plane/models/inputfile.py +8 -17
  31. cribl_control_plane/models/inputfirehose.py +12 -27
  32. cribl_control_plane/models/inputgooglepubsub.py +10 -23
  33. cribl_control_plane/models/inputgrafana_union.py +39 -81
  34. cribl_control_plane/models/inputhttp.py +12 -27
  35. cribl_control_plane/models/inputhttpraw.py +12 -27
  36. cribl_control_plane/models/inputjournalfiles.py +8 -16
  37. cribl_control_plane/models/inputkafka.py +18 -45
  38. cribl_control_plane/models/inputkinesis.py +18 -42
  39. cribl_control_plane/models/inputkubeevents.py +6 -13
  40. cribl_control_plane/models/inputkubelogs.py +10 -18
  41. cribl_control_plane/models/inputkubemetrics.py +10 -18
  42. cribl_control_plane/models/inputloki.py +14 -33
  43. cribl_control_plane/models/inputmetrics.py +10 -25
  44. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -33
  45. cribl_control_plane/models/inputmsk.py +20 -52
  46. cribl_control_plane/models/inputnetflow.py +8 -15
  47. cribl_control_plane/models/inputoffice365mgmt.py +18 -37
  48. cribl_control_plane/models/inputoffice365msgtrace.py +20 -41
  49. cribl_control_plane/models/inputoffice365service.py +20 -41
  50. cribl_control_plane/models/inputopentelemetry.py +20 -42
  51. cribl_control_plane/models/inputprometheus.py +22 -54
  52. cribl_control_plane/models/inputprometheusrw.py +14 -34
  53. cribl_control_plane/models/inputrawudp.py +8 -15
  54. cribl_control_plane/models/inputs3.py +10 -23
  55. cribl_control_plane/models/inputs3inventory.py +12 -28
  56. cribl_control_plane/models/inputsecuritylake.py +12 -29
  57. cribl_control_plane/models/inputsnmp.py +10 -20
  58. cribl_control_plane/models/inputsplunk.py +16 -37
  59. cribl_control_plane/models/inputsplunkhec.py +14 -33
  60. cribl_control_plane/models/inputsplunksearch.py +18 -37
  61. cribl_control_plane/models/inputsqs.py +14 -31
  62. cribl_control_plane/models/inputsyslog_union.py +29 -53
  63. cribl_control_plane/models/inputsystemmetrics.py +26 -50
  64. cribl_control_plane/models/inputsystemstate.py +10 -18
  65. cribl_control_plane/models/inputtcp.py +14 -33
  66. cribl_control_plane/models/inputtcpjson.py +14 -33
  67. cribl_control_plane/models/inputwef.py +22 -45
  68. cribl_control_plane/models/inputwindowsmetrics.py +26 -46
  69. cribl_control_plane/models/inputwineventlogs.py +12 -22
  70. cribl_control_plane/models/inputwiz.py +12 -25
  71. cribl_control_plane/models/inputzscalerhec.py +14 -33
  72. cribl_control_plane/models/listoutputop.py +2 -2
  73. cribl_control_plane/models/output.py +3 -6
  74. cribl_control_plane/models/outputazureblob.py +20 -52
  75. cribl_control_plane/models/outputazuredataexplorer.py +30 -77
  76. cribl_control_plane/models/outputazureeventhub.py +20 -44
  77. cribl_control_plane/models/outputazurelogs.py +14 -37
  78. cribl_control_plane/models/outputclickhouse.py +22 -59
  79. cribl_control_plane/models/outputcloudwatch.py +12 -33
  80. cribl_control_plane/models/outputconfluentcloud.py +32 -75
  81. cribl_control_plane/models/outputcriblhttp.py +18 -46
  82. cribl_control_plane/models/outputcribllake.py +18 -48
  83. cribl_control_plane/models/outputcribltcp.py +20 -47
  84. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
  85. cribl_control_plane/models/outputdatadog.py +22 -50
  86. cribl_control_plane/models/outputdataset.py +20 -48
  87. cribl_control_plane/models/outputdefault.py +2 -5
  88. cribl_control_plane/models/outputdevnull.py +2 -5
  89. cribl_control_plane/models/outputdiskspool.py +4 -9
  90. cribl_control_plane/models/outputdls3.py +26 -72
  91. cribl_control_plane/models/outputdynatracehttp.py +22 -57
  92. cribl_control_plane/models/outputdynatraceotlp.py +24 -59
  93. cribl_control_plane/models/outputelastic.py +20 -45
  94. cribl_control_plane/models/outputelasticcloud.py +14 -40
  95. cribl_control_plane/models/outputexabeam.py +12 -33
  96. cribl_control_plane/models/outputfilesystem.py +16 -41
  97. cribl_control_plane/models/outputgooglechronicle.py +18 -54
  98. cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
  99. cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
  100. cribl_control_plane/models/outputgooglepubsub.py +16 -39
  101. cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
  102. cribl_control_plane/models/outputgraphite.py +16 -35
  103. cribl_control_plane/models/outputhoneycomb.py +14 -37
  104. cribl_control_plane/models/outputhumiohec.py +18 -47
  105. cribl_control_plane/models/outputinfluxdb.py +18 -44
  106. cribl_control_plane/models/outputkafka.py +28 -73
  107. cribl_control_plane/models/outputkinesis.py +18 -44
  108. cribl_control_plane/models/outputloki.py +18 -43
  109. cribl_control_plane/models/outputminio.py +26 -69
  110. cribl_control_plane/models/outputmsk.py +30 -81
  111. cribl_control_plane/models/outputnetflow.py +2 -5
  112. cribl_control_plane/models/outputnewrelic.py +20 -45
  113. cribl_control_plane/models/outputnewrelicevents.py +16 -45
  114. cribl_control_plane/models/outputopentelemetry.py +28 -69
  115. cribl_control_plane/models/outputprometheus.py +14 -37
  116. cribl_control_plane/models/outputring.py +10 -21
  117. cribl_control_plane/models/outputrouter.py +2 -5
  118. cribl_control_plane/models/outputs3.py +28 -72
  119. cribl_control_plane/models/outputsecuritylake.py +20 -56
  120. cribl_control_plane/models/outputsentinel.py +20 -49
  121. cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
  122. cribl_control_plane/models/outputservicenow.py +26 -64
  123. cribl_control_plane/models/outputsignalfx.py +16 -39
  124. cribl_control_plane/models/outputsnmp.py +2 -5
  125. cribl_control_plane/models/outputsns.py +16 -40
  126. cribl_control_plane/models/outputsplunk.py +26 -64
  127. cribl_control_plane/models/outputsplunkhec.py +14 -37
  128. cribl_control_plane/models/outputsplunklb.py +36 -83
  129. cribl_control_plane/models/outputsqs.py +18 -45
  130. cribl_control_plane/models/outputstatsd.py +16 -34
  131. cribl_control_plane/models/outputstatsdext.py +14 -33
  132. cribl_control_plane/models/outputsumologic.py +14 -37
  133. cribl_control_plane/models/outputsyslog.py +26 -60
  134. cribl_control_plane/models/outputtcpjson.py +22 -54
  135. cribl_control_plane/models/outputwavefront.py +14 -37
  136. cribl_control_plane/models/outputwebhook.py +24 -60
  137. cribl_control_plane/models/outputxsiam.py +16 -37
  138. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  139. cribl_control_plane/sdk.py +3 -5
  140. cribl_control_plane/sources.py +8 -10
  141. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/METADATA +13 -13
  142. cribl_control_plane-0.0.17.dist-info/RECORD +215 -0
  143. cribl_control_plane-0.0.15.dist-info/RECORD +0 -215
  144. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.17.dist-info}/WHEEL +0 -0
@@ -11,7 +11,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict
11
11
 
12
12
  class CreateOutputTestByIDRequestTypedDict(TypedDict):
13
13
  id: str
14
- r"""Output Id"""
14
+ r"""Destination Id"""
15
15
  output_test_request: OutputTestRequestTypedDict
16
16
  r"""OutputTestRequest object"""
17
17
 
@@ -20,7 +20,7 @@ class CreateOutputTestByIDRequest(BaseModel):
20
20
  id: Annotated[
21
21
  str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
22
22
  ]
23
- r"""Output Id"""
23
+ r"""Destination Id"""
24
24
 
25
25
  output_test_request: Annotated[
26
26
  OutputTestRequest,
@@ -21,7 +21,7 @@ class DeleteOutputByIDRequest(BaseModel):
21
21
 
22
22
 
23
23
  class DeleteOutputByIDResponseTypedDict(TypedDict):
24
- r"""a list of Output objects"""
24
+ r"""a list of Destination objects"""
25
25
 
26
26
  count: NotRequired[int]
27
27
  r"""number of items present in the items array"""
@@ -29,7 +29,7 @@ class DeleteOutputByIDResponseTypedDict(TypedDict):
29
29
 
30
30
 
31
31
  class DeleteOutputByIDResponse(BaseModel):
32
- r"""a list of Output objects"""
32
+ r"""a list of Destination objects"""
33
33
 
34
34
  count: Optional[int] = None
35
35
  r"""number of items present in the items array"""
@@ -9,14 +9,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
9
9
 
10
10
  class DeleteOutputPqByIDRequestTypedDict(TypedDict):
11
11
  id: str
12
- r"""Output Id"""
12
+ r"""Destination Id"""
13
13
 
14
14
 
15
15
  class DeleteOutputPqByIDRequest(BaseModel):
16
16
  id: Annotated[
17
17
  str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
18
18
  ]
19
- r"""Output Id"""
19
+ r"""Destination Id"""
20
20
 
21
21
 
22
22
  class DeleteOutputPqByIDResponseTypedDict(TypedDict):
@@ -21,7 +21,7 @@ class GetOutputByIDRequest(BaseModel):
21
21
 
22
22
 
23
23
  class GetOutputByIDResponseTypedDict(TypedDict):
24
- r"""a list of Output objects"""
24
+ r"""a list of Destination objects"""
25
25
 
26
26
  count: NotRequired[int]
27
27
  r"""number of items present in the items array"""
@@ -29,7 +29,7 @@ class GetOutputByIDResponseTypedDict(TypedDict):
29
29
 
30
30
 
31
31
  class GetOutputByIDResponse(BaseModel):
32
- r"""a list of Output objects"""
32
+ r"""a list of Destination objects"""
33
33
 
34
34
  count: Optional[int] = None
35
35
  r"""number of items present in the items array"""
@@ -9,14 +9,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
9
9
 
10
10
  class GetOutputPqByIDRequestTypedDict(TypedDict):
11
11
  id: str
12
- r"""Output Id"""
12
+ r"""Destination Id"""
13
13
 
14
14
 
15
15
  class GetOutputPqByIDRequest(BaseModel):
16
16
  id: Annotated[
17
17
  str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
18
18
  ]
19
- r"""Output Id"""
19
+ r"""Destination Id"""
20
20
 
21
21
 
22
22
  class GetOutputPqByIDResponseTypedDict(TypedDict):
@@ -10,14 +10,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
10
10
 
11
11
  class GetOutputSamplesByIDRequestTypedDict(TypedDict):
12
12
  id: str
13
- r"""Output Id"""
13
+ r"""Destination Id"""
14
14
 
15
15
 
16
16
  class GetOutputSamplesByIDRequest(BaseModel):
17
17
  id: Annotated[
18
18
  str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
19
19
  ]
20
- r"""Output Id"""
20
+ r"""Destination Id"""
21
21
 
22
22
 
23
23
  class GetOutputSamplesByIDResponseTypedDict(TypedDict):
@@ -1,22 +1,19 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
5
4
  from cribl_control_plane.types import BaseModel
6
- from cribl_control_plane.utils import validate_open_enum
7
5
  from enum import Enum
8
6
  import pydantic
9
- from pydantic.functional_validators import PlainValidator
10
7
  from typing import Optional
11
8
  from typing_extensions import Annotated, NotRequired, TypedDict
12
9
 
13
10
 
14
- class Role(str, Enum, metaclass=utils.OpenEnumMeta):
11
+ class Role(str, Enum):
15
12
  PRIMARY = "primary"
16
13
  STANDBY = "standby"
17
14
 
18
15
 
19
- class Status(str, Enum, metaclass=utils.OpenEnumMeta):
16
+ class Status(str, Enum):
20
17
  HEALTHY = "healthy"
21
18
  SHUTTING_DOWN = "shutting down"
22
19
  STANDBY = "standby"
@@ -29,8 +26,8 @@ class HealthStatusTypedDict(TypedDict):
29
26
 
30
27
 
31
28
  class HealthStatus(BaseModel):
32
- status: Annotated[Status, PlainValidator(validate_open_enum(False))]
29
+ status: Status
33
30
 
34
31
  start_time: Annotated[float, pydantic.Field(alias="startTime")]
35
32
 
36
- role: Annotated[Optional[Role], PlainValidator(validate_open_enum(False))] = None
33
+ role: Optional[Role] = None
@@ -1,17 +1,14 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
5
4
  from cribl_control_plane.types import BaseModel
6
- from cribl_control_plane.utils import validate_open_enum
7
5
  from enum import Enum
8
6
  import pydantic
9
- from pydantic.functional_validators import PlainValidator
10
7
  from typing import Any, List, Optional
11
8
  from typing_extensions import Annotated, NotRequired, TypedDict
12
9
 
13
10
 
14
- class InputAppscopeType(str, Enum, metaclass=utils.OpenEnumMeta):
11
+ class InputAppscopeType(str, Enum):
15
12
  APPSCOPE = "appscope"
16
13
 
17
14
 
@@ -26,14 +23,14 @@ class InputAppscopeConnection(BaseModel):
26
23
  pipeline: Optional[str] = None
27
24
 
28
25
 
29
- class InputAppscopeMode(str, Enum, metaclass=utils.OpenEnumMeta):
26
+ class InputAppscopeMode(str, Enum):
30
27
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
31
28
 
32
29
  SMART = "smart"
33
30
  ALWAYS = "always"
34
31
 
35
32
 
36
- class InputAppscopeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
33
+ class InputAppscopeCompression(str, Enum):
37
34
  r"""Codec to use to compress the persisted data"""
38
35
 
39
36
  NONE = "none"
@@ -58,9 +55,7 @@ class InputAppscopePqTypedDict(TypedDict):
58
55
 
59
56
 
60
57
  class InputAppscopePq(BaseModel):
61
- mode: Annotated[
62
- Optional[InputAppscopeMode], PlainValidator(validate_open_enum(False))
63
- ] = InputAppscopeMode.ALWAYS
58
+ mode: Optional[InputAppscopeMode] = InputAppscopeMode.ALWAYS
64
59
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
65
60
 
66
61
  max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputAppscopePq(BaseModel):
84
79
  path: Optional[str] = "$CRIBL_HOME/state/queues"
85
80
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
86
81
 
87
- compress: Annotated[
88
- Optional[InputAppscopeCompression], PlainValidator(validate_open_enum(False))
89
- ] = InputAppscopeCompression.NONE
82
+ compress: Optional[InputAppscopeCompression] = InputAppscopeCompression.NONE
90
83
  r"""Codec to use to compress the persisted data"""
91
84
 
92
85
 
@@ -138,7 +131,7 @@ class InputAppscopeFilter(BaseModel):
138
131
  r"""To override the UNIX domain socket or address/port specified in General Settings (while leaving Authentication settings as is), enter a URL."""
139
132
 
140
133
 
141
- class InputAppscopeDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
134
+ class InputAppscopeDataCompressionFormat(str, Enum):
142
135
  NONE = "none"
143
136
  GZIP = "gzip"
144
137
 
@@ -170,10 +163,9 @@ class InputAppscopePersistence(BaseModel):
170
163
  max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
171
164
  r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""
172
165
 
173
- compress: Annotated[
174
- Optional[InputAppscopeDataCompressionFormat],
175
- PlainValidator(validate_open_enum(False)),
176
- ] = InputAppscopeDataCompressionFormat.GZIP
166
+ compress: Optional[InputAppscopeDataCompressionFormat] = (
167
+ InputAppscopeDataCompressionFormat.GZIP
168
+ )
177
169
 
178
170
  dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
179
171
  "$CRIBL_HOME/state/appscope"
@@ -181,21 +173,21 @@ class InputAppscopePersistence(BaseModel):
181
173
  r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/appscope"""
182
174
 
183
175
 
184
- class InputAppscopeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
176
+ class InputAppscopeAuthenticationMethod(str, Enum):
185
177
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
186
178
 
187
179
  MANUAL = "manual"
188
180
  SECRET = "secret"
189
181
 
190
182
 
191
- class InputAppscopeMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
183
+ class InputAppscopeMinimumTLSVersion(str, Enum):
192
184
  TL_SV1 = "TLSv1"
193
185
  TL_SV1_1 = "TLSv1.1"
194
186
  TL_SV1_2 = "TLSv1.2"
195
187
  TL_SV1_3 = "TLSv1.3"
196
188
 
197
189
 
198
- class InputAppscopeMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
190
+ class InputAppscopeMaximumTLSVersion(str, Enum):
199
191
  TL_SV1 = "TLSv1"
200
192
  TL_SV1_1 = "TLSv1.1"
201
193
  TL_SV1_2 = "TLSv1.2"
@@ -254,19 +246,11 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
254
246
  ] = None
255
247
 
256
248
  min_version: Annotated[
257
- Annotated[
258
- Optional[InputAppscopeMinimumTLSVersion],
259
- PlainValidator(validate_open_enum(False)),
260
- ],
261
- pydantic.Field(alias="minVersion"),
249
+ Optional[InputAppscopeMinimumTLSVersion], pydantic.Field(alias="minVersion")
262
250
  ] = None
263
251
 
264
252
  max_version: Annotated[
265
- Annotated[
266
- Optional[InputAppscopeMaximumTLSVersion],
267
- PlainValidator(validate_open_enum(False)),
268
- ],
269
- pydantic.Field(alias="maxVersion"),
253
+ Optional[InputAppscopeMaximumTLSVersion], pydantic.Field(alias="maxVersion")
270
254
  ] = None
271
255
 
272
256
 
@@ -332,7 +316,7 @@ class InputAppscope(BaseModel):
332
316
  id: str
333
317
  r"""Unique ID for this input"""
334
318
 
335
- type: Annotated[InputAppscopeType, PlainValidator(validate_open_enum(False))]
319
+ type: InputAppscopeType
336
320
 
337
321
  disabled: Optional[bool] = False
338
322
 
@@ -413,11 +397,7 @@ class InputAppscope(BaseModel):
413
397
  persistence: Optional[InputAppscopePersistence] = None
414
398
 
415
399
  auth_type: Annotated[
416
- Annotated[
417
- Optional[InputAppscopeAuthenticationMethod],
418
- PlainValidator(validate_open_enum(False)),
419
- ],
420
- pydantic.Field(alias="authType"),
400
+ Optional[InputAppscopeAuthenticationMethod], pydantic.Field(alias="authType")
421
401
  ] = InputAppscopeAuthenticationMethod.MANUAL
422
402
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
423
403
 
@@ -1,17 +1,14 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
5
4
  from cribl_control_plane.types import BaseModel
6
- from cribl_control_plane.utils import validate_open_enum
7
5
  from enum import Enum
8
6
  import pydantic
9
- from pydantic.functional_validators import PlainValidator
10
7
  from typing import List, Optional
11
8
  from typing_extensions import Annotated, NotRequired, TypedDict
12
9
 
13
10
 
14
- class InputAzureBlobType(str, Enum, metaclass=utils.OpenEnumMeta):
11
+ class InputAzureBlobType(str, Enum):
15
12
  AZURE_BLOB = "azure_blob"
16
13
 
17
14
 
@@ -26,14 +23,14 @@ class InputAzureBlobConnection(BaseModel):
26
23
  pipeline: Optional[str] = None
27
24
 
28
25
 
29
- class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
26
+ class InputAzureBlobMode(str, Enum):
30
27
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
31
28
 
32
29
  SMART = "smart"
33
30
  ALWAYS = "always"
34
31
 
35
32
 
36
- class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
33
+ class InputAzureBlobCompression(str, Enum):
37
34
  r"""Codec to use to compress the persisted data"""
38
35
 
39
36
  NONE = "none"
@@ -58,9 +55,7 @@ class InputAzureBlobPqTypedDict(TypedDict):
58
55
 
59
56
 
60
57
  class InputAzureBlobPq(BaseModel):
61
- mode: Annotated[
62
- Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
63
- ] = InputAzureBlobMode.ALWAYS
58
+ mode: Optional[InputAzureBlobMode] = InputAzureBlobMode.ALWAYS
64
59
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
65
60
 
66
61
  max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputAzureBlobPq(BaseModel):
84
79
  path: Optional[str] = "$CRIBL_HOME/state/queues"
85
80
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
86
81
 
87
- compress: Annotated[
88
- Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
89
- ] = InputAzureBlobCompression.NONE
82
+ compress: Optional[InputAzureBlobCompression] = InputAzureBlobCompression.NONE
90
83
  r"""Codec to use to compress the persisted data"""
91
84
 
92
85
 
@@ -103,7 +96,7 @@ class InputAzureBlobMetadatum(BaseModel):
103
96
  r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
104
97
 
105
98
 
106
- class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
99
+ class InputAzureBlobAuthenticationMethod(str, Enum):
107
100
  MANUAL = "manual"
108
101
  SECRET = "secret"
109
102
  CLIENT_SECRET = "clientSecret"
@@ -184,7 +177,7 @@ class InputAzureBlobTypedDict(TypedDict):
184
177
 
185
178
 
186
179
  class InputAzureBlob(BaseModel):
187
- type: Annotated[InputAzureBlobType, PlainValidator(validate_open_enum(False))]
180
+ type: InputAzureBlobType
188
181
 
189
182
  queue_name: Annotated[str, pydantic.Field(alias="queueName")]
190
183
  r"""The storage account queue name blob notifications will be read from. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myQueue-${C.vars.myVar}`"""
@@ -264,11 +257,7 @@ class InputAzureBlob(BaseModel):
264
257
  r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""
265
258
 
266
259
  auth_type: Annotated[
267
- Annotated[
268
- Optional[InputAzureBlobAuthenticationMethod],
269
- PlainValidator(validate_open_enum(False)),
270
- ],
271
- pydantic.Field(alias="authType"),
260
+ Optional[InputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
272
261
  ] = InputAzureBlobAuthenticationMethod.MANUAL
273
262
 
274
263
  description: Optional[str] = None
@@ -1,17 +1,14 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
5
4
  from cribl_control_plane.types import BaseModel
6
- from cribl_control_plane.utils import validate_open_enum
7
5
  from enum import Enum
8
6
  import pydantic
9
- from pydantic.functional_validators import PlainValidator
10
7
  from typing import List, Optional
11
8
  from typing_extensions import Annotated, NotRequired, TypedDict
12
9
 
13
10
 
14
- class InputCollectionType(str, Enum, metaclass=utils.OpenEnumMeta):
11
+ class InputCollectionType(str, Enum):
15
12
  COLLECTION = "collection"
16
13
 
17
14
 
@@ -26,14 +23,14 @@ class InputCollectionConnection(BaseModel):
26
23
  pipeline: Optional[str] = None
27
24
 
28
25
 
29
- class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
26
+ class InputCollectionMode(str, Enum):
30
27
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
31
28
 
32
29
  SMART = "smart"
33
30
  ALWAYS = "always"
34
31
 
35
32
 
36
- class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
33
+ class InputCollectionCompression(str, Enum):
37
34
  r"""Codec to use to compress the persisted data"""
38
35
 
39
36
  NONE = "none"
@@ -58,9 +55,7 @@ class InputCollectionPqTypedDict(TypedDict):
58
55
 
59
56
 
60
57
  class InputCollectionPq(BaseModel):
61
- mode: Annotated[
62
- Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
63
- ] = InputCollectionMode.ALWAYS
58
+ mode: Optional[InputCollectionMode] = InputCollectionMode.ALWAYS
64
59
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
65
60
 
66
61
  max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputCollectionPq(BaseModel):
84
79
  path: Optional[str] = "$CRIBL_HOME/state/queues"
85
80
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
86
81
 
87
- compress: Annotated[
88
- Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
89
- ] = InputCollectionCompression.NONE
82
+ compress: Optional[InputCollectionCompression] = InputCollectionCompression.NONE
90
83
  r"""Codec to use to compress the persisted data"""
91
84
 
92
85
 
@@ -156,9 +149,7 @@ class InputCollection(BaseModel):
156
149
  id: str
157
150
  r"""Unique ID for this input"""
158
151
 
159
- type: Annotated[
160
- Optional[InputCollectionType], PlainValidator(validate_open_enum(False))
161
- ] = InputCollectionType.COLLECTION
152
+ type: Optional[InputCollectionType] = InputCollectionType.COLLECTION
162
153
 
163
154
  disabled: Optional[bool] = False
164
155
 
@@ -1,17 +1,14 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
5
4
  from cribl_control_plane.types import BaseModel
6
- from cribl_control_plane.utils import validate_open_enum
7
5
  from enum import Enum
8
6
  import pydantic
9
- from pydantic.functional_validators import PlainValidator
10
7
  from typing import List, Optional
11
8
  from typing_extensions import Annotated, NotRequired, TypedDict
12
9
 
13
10
 
14
- class InputConfluentCloudType(str, Enum, metaclass=utils.OpenEnumMeta):
11
+ class InputConfluentCloudType(str, Enum):
15
12
  CONFLUENT_CLOUD = "confluent_cloud"
16
13
 
17
14
 
@@ -26,14 +23,14 @@ class InputConfluentCloudConnection(BaseModel):
26
23
  pipeline: Optional[str] = None
27
24
 
28
25
 
29
- class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
26
+ class InputConfluentCloudMode(str, Enum):
30
27
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
31
28
 
32
29
  SMART = "smart"
33
30
  ALWAYS = "always"
34
31
 
35
32
 
36
- class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
33
+ class InputConfluentCloudCompression(str, Enum):
37
34
  r"""Codec to use to compress the persisted data"""
38
35
 
39
36
  NONE = "none"
@@ -58,9 +55,7 @@ class InputConfluentCloudPqTypedDict(TypedDict):
58
55
 
59
56
 
60
57
  class InputConfluentCloudPq(BaseModel):
61
- mode: Annotated[
62
- Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
63
- ] = InputConfluentCloudMode.ALWAYS
58
+ mode: Optional[InputConfluentCloudMode] = InputConfluentCloudMode.ALWAYS
64
59
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
65
60
 
66
61
  max_buffer_size: Annotated[
@@ -84,21 +79,20 @@ class InputConfluentCloudPq(BaseModel):
84
79
  path: Optional[str] = "$CRIBL_HOME/state/queues"
85
80
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
86
81
 
87
- compress: Annotated[
88
- Optional[InputConfluentCloudCompression],
89
- PlainValidator(validate_open_enum(False)),
90
- ] = InputConfluentCloudCompression.NONE
82
+ compress: Optional[InputConfluentCloudCompression] = (
83
+ InputConfluentCloudCompression.NONE
84
+ )
91
85
  r"""Codec to use to compress the persisted data"""
92
86
 
93
87
 
94
- class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
88
+ class InputConfluentCloudMinimumTLSVersion(str, Enum):
95
89
  TL_SV1 = "TLSv1"
96
90
  TL_SV1_1 = "TLSv1.1"
97
91
  TL_SV1_2 = "TLSv1.2"
98
92
  TL_SV1_3 = "TLSv1.3"
99
93
 
100
94
 
101
- class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
95
+ class InputConfluentCloudMaximumTLSVersion(str, Enum):
102
96
  TL_SV1 = "TLSv1"
103
97
  TL_SV1_1 = "TLSv1.1"
104
98
  TL_SV1_2 = "TLSv1.2"
@@ -158,18 +152,12 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
158
152
  r"""Passphrase to use to decrypt private key"""
159
153
 
160
154
  min_version: Annotated[
161
- Annotated[
162
- Optional[InputConfluentCloudMinimumTLSVersion],
163
- PlainValidator(validate_open_enum(False)),
164
- ],
155
+ Optional[InputConfluentCloudMinimumTLSVersion],
165
156
  pydantic.Field(alias="minVersion"),
166
157
  ] = None
167
158
 
168
159
  max_version: Annotated[
169
- Annotated[
170
- Optional[InputConfluentCloudMaximumTLSVersion],
171
- PlainValidator(validate_open_enum(False)),
172
- ],
160
+ Optional[InputConfluentCloudMaximumTLSVersion],
173
161
  pydantic.Field(alias="maxVersion"),
174
162
  ] = None
175
163
 
@@ -193,18 +181,14 @@ class InputConfluentCloudAuth(BaseModel):
193
181
  r"""Select or create a secret that references your credentials"""
194
182
 
195
183
 
196
- class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
197
- str, Enum, metaclass=utils.OpenEnumMeta
198
- ):
184
+ class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
199
185
  TL_SV1 = "TLSv1"
200
186
  TL_SV1_1 = "TLSv1.1"
201
187
  TL_SV1_2 = "TLSv1.2"
202
188
  TL_SV1_3 = "TLSv1.3"
203
189
 
204
190
 
205
- class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
206
- str, Enum, metaclass=utils.OpenEnumMeta
207
- ):
191
+ class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
208
192
  TL_SV1 = "TLSv1"
209
193
  TL_SV1_1 = "TLSv1.1"
210
194
  TL_SV1_2 = "TLSv1.2"
@@ -264,18 +248,12 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
264
248
  r"""Passphrase to use to decrypt private key"""
265
249
 
266
250
  min_version: Annotated[
267
- Annotated[
268
- Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
269
- PlainValidator(validate_open_enum(False)),
270
- ],
251
+ Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
271
252
  pydantic.Field(alias="minVersion"),
272
253
  ] = None
273
254
 
274
255
  max_version: Annotated[
275
- Annotated[
276
- Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
277
- PlainValidator(validate_open_enum(False)),
278
- ],
256
+ Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
279
257
  pydantic.Field(alias="maxVersion"),
280
258
  ] = None
281
259
 
@@ -324,7 +302,7 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
324
302
  tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
325
303
 
326
304
 
327
- class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
305
+ class InputConfluentCloudSASLMechanism(str, Enum):
328
306
  PLAIN = "plain"
329
307
  SCRAM_SHA_256 = "scram-sha-256"
330
308
  SCRAM_SHA_512 = "scram-sha-512"
@@ -343,10 +321,9 @@ class InputConfluentCloudAuthentication(BaseModel):
343
321
 
344
322
  disabled: Optional[bool] = True
345
323
 
346
- mechanism: Annotated[
347
- Optional[InputConfluentCloudSASLMechanism],
348
- PlainValidator(validate_open_enum(False)),
349
- ] = InputConfluentCloudSASLMechanism.PLAIN
324
+ mechanism: Optional[InputConfluentCloudSASLMechanism] = (
325
+ InputConfluentCloudSASLMechanism.PLAIN
326
+ )
350
327
 
351
328
 
352
329
  class InputConfluentCloudMetadatumTypedDict(TypedDict):
@@ -363,13 +340,13 @@ class InputConfluentCloudMetadatum(BaseModel):
363
340
 
364
341
 
365
342
  class InputConfluentCloudTypedDict(TypedDict):
366
- type: InputConfluentCloudType
367
343
  brokers: List[str]
368
344
  r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
369
345
  topics: List[str]
370
346
  r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
371
347
  id: NotRequired[str]
372
348
  r"""Unique ID for this input"""
349
+ type: NotRequired[InputConfluentCloudType]
373
350
  disabled: NotRequired[bool]
374
351
  pipeline: NotRequired[str]
375
352
  r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -443,8 +420,6 @@ class InputConfluentCloudTypedDict(TypedDict):
443
420
 
444
421
 
445
422
  class InputConfluentCloud(BaseModel):
446
- type: Annotated[InputConfluentCloudType, PlainValidator(validate_open_enum(False))]
447
-
448
423
  brokers: List[str]
449
424
  r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
450
425
 
@@ -454,6 +429,8 @@ class InputConfluentCloud(BaseModel):
454
429
  id: Optional[str] = None
455
430
  r"""Unique ID for this input"""
456
431
 
432
+ type: Optional[InputConfluentCloudType] = None
433
+
457
434
  disabled: Optional[bool] = False
458
435
 
459
436
  pipeline: Optional[str] = None