cribl-control-plane: 0.2.1rc7 → 0.3.0a1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/errors/__init__.py +5 -8
- cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
- cribl_control_plane/groups_sdk.py +28 -52
- cribl_control_plane/health.py +16 -22
- cribl_control_plane/models/__init__.py +54 -217
- cribl_control_plane/models/appmode.py +14 -0
- cribl_control_plane/models/authtoken.py +1 -5
- cribl_control_plane/models/cacheconnection.py +0 -20
- cribl_control_plane/models/configgroup.py +7 -55
- cribl_control_plane/models/configgroupcloud.py +1 -11
- cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
- cribl_control_plane/models/createroutesappendbyidop.py +2 -2
- cribl_control_plane/models/createversionundoop.py +3 -3
- cribl_control_plane/models/cribllakedataset.py +1 -11
- cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
- cribl_control_plane/models/datasetmetadata.py +1 -11
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
- cribl_control_plane/models/distributedsummary.py +0 -6
- cribl_control_plane/models/error.py +16 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
- cribl_control_plane/models/gethealthinfoop.py +17 -0
- cribl_control_plane/models/getsummaryop.py +0 -11
- cribl_control_plane/models/hbcriblinfo.py +3 -24
- cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/input.py +78 -80
- cribl_control_plane/models/inputappscope.py +17 -80
- cribl_control_plane/models/inputazureblob.py +1 -33
- cribl_control_plane/models/inputcollection.py +1 -24
- cribl_control_plane/models/inputconfluentcloud.py +18 -195
- cribl_control_plane/models/inputcribl.py +1 -24
- cribl_control_plane/models/inputcriblhttp.py +17 -62
- cribl_control_plane/models/inputcribllakehttp.py +17 -62
- cribl_control_plane/models/inputcriblmetrics.py +1 -24
- cribl_control_plane/models/inputcribltcp.py +17 -62
- cribl_control_plane/models/inputcrowdstrike.py +1 -54
- cribl_control_plane/models/inputdatadogagent.py +17 -62
- cribl_control_plane/models/inputdatagen.py +1 -24
- cribl_control_plane/models/inputedgeprometheus.py +34 -147
- cribl_control_plane/models/inputelastic.py +27 -119
- cribl_control_plane/models/inputeventhub.py +1 -182
- cribl_control_plane/models/inputexec.py +1 -33
- cribl_control_plane/models/inputfile.py +3 -42
- cribl_control_plane/models/inputfirehose.py +17 -62
- cribl_control_plane/models/inputgooglepubsub.py +1 -36
- cribl_control_plane/models/inputgrafana.py +32 -157
- cribl_control_plane/models/inputhttp.py +17 -62
- cribl_control_plane/models/inputhttpraw.py +17 -62
- cribl_control_plane/models/inputjournalfiles.py +1 -24
- cribl_control_plane/models/inputkafka.py +17 -189
- cribl_control_plane/models/inputkinesis.py +1 -80
- cribl_control_plane/models/inputkubeevents.py +1 -24
- cribl_control_plane/models/inputkubelogs.py +1 -33
- cribl_control_plane/models/inputkubemetrics.py +1 -33
- cribl_control_plane/models/inputloki.py +17 -71
- cribl_control_plane/models/inputmetrics.py +17 -62
- cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
- cribl_control_plane/models/inputmsk.py +18 -81
- cribl_control_plane/models/inputnetflow.py +1 -24
- cribl_control_plane/models/inputoffice365mgmt.py +1 -67
- cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
- cribl_control_plane/models/inputoffice365service.py +1 -67
- cribl_control_plane/models/inputopentelemetry.py +16 -92
- cribl_control_plane/models/inputprometheus.py +34 -138
- cribl_control_plane/models/inputprometheusrw.py +17 -71
- cribl_control_plane/models/inputrawudp.py +1 -24
- cribl_control_plane/models/inputs3.py +1 -45
- cribl_control_plane/models/inputs3inventory.py +1 -54
- cribl_control_plane/models/inputsecuritylake.py +1 -54
- cribl_control_plane/models/inputsnmp.py +1 -40
- cribl_control_plane/models/inputsplunk.py +17 -85
- cribl_control_plane/models/inputsplunkhec.py +16 -70
- cribl_control_plane/models/inputsplunksearch.py +1 -63
- cribl_control_plane/models/inputsqs.py +1 -56
- cribl_control_plane/models/inputsyslog.py +32 -121
- cribl_control_plane/models/inputsystemmetrics.py +9 -142
- cribl_control_plane/models/inputsystemstate.py +1 -33
- cribl_control_plane/models/inputtcp.py +17 -81
- cribl_control_plane/models/inputtcpjson.py +17 -71
- cribl_control_plane/models/inputwef.py +1 -71
- cribl_control_plane/models/inputwindowsmetrics.py +9 -129
- cribl_control_plane/models/inputwineventlogs.py +1 -60
- cribl_control_plane/models/inputwiz.py +1 -45
- cribl_control_plane/models/inputwizwebhook.py +17 -62
- cribl_control_plane/models/inputzscalerhec.py +16 -70
- cribl_control_plane/models/jobinfo.py +1 -4
- cribl_control_plane/models/jobstatus.py +3 -34
- cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
- cribl_control_plane/models/logininfo.py +3 -3
- cribl_control_plane/models/masterworkerentry.py +1 -11
- cribl_control_plane/models/nodeprovidedinfo.py +1 -11
- cribl_control_plane/models/nodeupgradestatus.py +0 -38
- cribl_control_plane/models/output.py +88 -93
- cribl_control_plane/models/outputazureblob.py +1 -110
- cribl_control_plane/models/outputazuredataexplorer.py +87 -452
- cribl_control_plane/models/outputazureeventhub.py +19 -281
- cribl_control_plane/models/outputazurelogs.py +19 -115
- cribl_control_plane/models/outputchronicle.py +19 -115
- cribl_control_plane/models/outputclickhouse.py +19 -155
- cribl_control_plane/models/outputcloudwatch.py +19 -106
- cribl_control_plane/models/outputconfluentcloud.py +38 -311
- cribl_control_plane/models/outputcriblhttp.py +19 -135
- cribl_control_plane/models/outputcribllake.py +1 -97
- cribl_control_plane/models/outputcribltcp.py +19 -132
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
- cribl_control_plane/models/outputdatadog.py +19 -159
- cribl_control_plane/models/outputdataset.py +19 -143
- cribl_control_plane/models/outputdiskspool.py +1 -11
- cribl_control_plane/models/outputdls3.py +1 -152
- cribl_control_plane/models/outputdynatracehttp.py +19 -160
- cribl_control_plane/models/outputdynatraceotlp.py +19 -160
- cribl_control_plane/models/outputelastic.py +19 -163
- cribl_control_plane/models/outputelasticcloud.py +19 -140
- cribl_control_plane/models/outputexabeam.py +1 -61
- cribl_control_plane/models/outputfilesystem.py +1 -87
- cribl_control_plane/models/outputgooglechronicle.py +20 -166
- cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
- cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
- cribl_control_plane/models/outputgooglepubsub.py +19 -106
- cribl_control_plane/models/outputgrafanacloud.py +37 -288
- cribl_control_plane/models/outputgraphite.py +19 -105
- cribl_control_plane/models/outputhoneycomb.py +19 -115
- cribl_control_plane/models/outputhumiohec.py +19 -126
- cribl_control_plane/models/outputinfluxdb.py +19 -130
- cribl_control_plane/models/outputkafka.py +34 -302
- cribl_control_plane/models/outputkinesis.py +19 -133
- cribl_control_plane/models/outputloki.py +17 -129
- cribl_control_plane/models/outputminio.py +1 -145
- cribl_control_plane/models/outputmsk.py +34 -193
- cribl_control_plane/models/outputnewrelic.py +19 -136
- cribl_control_plane/models/outputnewrelicevents.py +20 -128
- cribl_control_plane/models/outputopentelemetry.py +19 -178
- cribl_control_plane/models/outputprometheus.py +19 -115
- cribl_control_plane/models/outputring.py +1 -31
- cribl_control_plane/models/outputs3.py +1 -152
- cribl_control_plane/models/outputsecuritylake.py +1 -114
- cribl_control_plane/models/outputsentinel.py +19 -135
- cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
- cribl_control_plane/models/outputservicenow.py +19 -168
- cribl_control_plane/models/outputsignalfx.py +19 -115
- cribl_control_plane/models/outputsns.py +17 -113
- cribl_control_plane/models/outputsplunk.py +19 -153
- cribl_control_plane/models/outputsplunkhec.py +19 -208
- cribl_control_plane/models/outputsplunklb.py +19 -182
- cribl_control_plane/models/outputsqs.py +17 -124
- cribl_control_plane/models/outputstatsd.py +19 -105
- cribl_control_plane/models/outputstatsdext.py +19 -105
- cribl_control_plane/models/outputsumologic.py +19 -117
- cribl_control_plane/models/outputsyslog.py +96 -259
- cribl_control_plane/models/outputtcpjson.py +19 -141
- cribl_control_plane/models/outputwavefront.py +19 -115
- cribl_control_plane/models/outputwebhook.py +19 -161
- cribl_control_plane/models/outputxsiam.py +17 -113
- cribl_control_plane/models/packinfo.py +5 -8
- cribl_control_plane/models/packinstallinfo.py +5 -8
- cribl_control_plane/models/resourcepolicy.py +0 -11
- cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/runnablejobcollection.py +9 -72
- cribl_control_plane/models/runnablejobexecutor.py +9 -32
- cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
- cribl_control_plane/packs.py +7 -202
- cribl_control_plane/routes_sdk.py +6 -6
- cribl_control_plane/tokens.py +15 -23
- {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
- cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
- cribl_control_plane/models/groupcreaterequest.py +0 -171
- cribl_control_plane/models/outpostnodeinfo.py +0 -16
- cribl_control_plane/models/outputdatabricks.py +0 -482
- cribl_control_plane/models/updatepacksop.py +0 -25
- cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
- {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
--- a/cribl_control_plane/models/outputwebhook.py
+++ b/cribl_control_plane/models/outputwebhook.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -27,13 +26,9 @@ class OutputWebhookMethod(str, Enum, metaclass=utils.OpenEnumMeta):
 class OutputWebhookFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to format events before sending out"""
 
-    # NDJSON (Newline Delimited JSON)
     NDJSON = "ndjson"
-    # JSON Array
     JSON_ARRAY = "json_array"
-    # Custom
     CUSTOM = "custom"
-    # Advanced
     ADVANCED = "advanced"
 
 
@@ -51,11 +46,8 @@ class OutputWebhookExtraHTTPHeader(BaseModel):
 class OutputWebhookFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
 
-    # Payload
     PAYLOAD = "payload"
-    # Payload + Headers
     PAYLOAD_AND_HEADERS = "payloadAndHeaders"
-    # None
     NONE = "none"
 
 
@@ -116,28 +108,19 @@ class OutputWebhookTimeoutRetrySettings(BaseModel):
 class OutputWebhookBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
-    # Block
     BLOCK = "block"
-    # Drop
     DROP = "drop"
-    # Persistent Queue
     QUEUE = "queue"
 
 
 class OutputWebhookAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Authentication method to use for the HTTP request"""
 
-    # None
     NONE = "none"
-    # Basic
     BASIC = "basic"
-    # Basic (credentials secret)
     CREDENTIALS_SECRET = "credentialsSecret"
-    # Token
     TOKEN = "token"
-    # Token (text secret)
     TEXT_SECRET = "textSecret"
-    # OAuth
     OAUTH = "oauth"
 
 
@@ -212,54 +195,29 @@ class OutputWebhookTLSSettingsClientSide(BaseModel):
         pydantic.Field(alias="maxVersion"),
     ] = None
 
-    @field_serializer("min_version")
-    def serialize_min_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookMinimumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("max_version")
-    def serialize_max_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookMaximumTLSVersion(value)
-            except ValueError:
-                return value
-        return value
-
-
-class OutputWebhookMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    ALWAYS = "always"
-    # Always On
-    BACKPRESSURE = "backpressure"
-
 
 class OutputWebhookCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
 class OutputWebhookQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    # Block
     BLOCK = "block"
-    # Drop new data
     DROP = "drop"
 
 
+class OutputWebhookMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    ERROR = "error"
+    BACKPRESSURE = "backpressure"
+    ALWAYS = "always"
+
+
 class OutputWebhookPqControlsTypedDict(TypedDict):
     pass
 
@@ -389,16 +347,6 @@ class OutputWebhookTypedDict(TypedDict):
     r"""Custom JavaScript code to format incoming event data accessible through the __e variable. The formatted content is added to (__e['__eventOut']) if available. Otherwise, the original event is serialized as JSON. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
     format_payload_code: NotRequired[str]
     r"""Optional JavaScript code to format the payload sent to the Destination. The payload, containing a batch of formatted events, is accessible through the __e['payload'] variable. The formatted payload is returned in the __e['__payloadOut'] variable. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
-    pq_strict_ordering: NotRequired[bool]
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-    pq_rate_per_sec: NotRequired[float]
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-    pq_mode: NotRequired[OutputWebhookMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-    pq_max_buffer_size: NotRequired[float]
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-    pq_max_backpressure_sec: NotRequired[float]
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -409,6 +357,8 @@ class OutputWebhookTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputWebhookQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+    pq_mode: NotRequired[OutputWebhookMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputWebhookPqControlsTypedDict]
     username: NotRequired[str]
     password: NotRequired[str]
@@ -626,34 +576,6 @@ class OutputWebhook(BaseModel):
     ] = None
     r"""Optional JavaScript code to format the payload sent to the Destination. The payload, containing a batch of formatted events, is accessible through the __e['payload'] variable. The formatted payload is returned in the __e['__payloadOut'] variable. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
 
-    pq_strict_ordering: Annotated[
-        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
-    ] = True
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-
-    pq_rate_per_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqRatePerSec")
-    ] = 0
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-
-    pq_mode: Annotated[
-        Annotated[
-            Optional[OutputWebhookMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputWebhookMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    pq_max_buffer_size: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
-    ] = 42
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-
-    pq_max_backpressure_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
-    ] = 30
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
-
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -685,6 +607,14 @@ class OutputWebhook(BaseModel):
     ] = OutputWebhookQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    pq_mode: Annotated[
+        Annotated[
+            Optional[OutputWebhookMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputWebhookMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
     pq_controls: Annotated[
         Optional[OutputWebhookPqControls], pydantic.Field(alias="pqControls")
     ] = None
@@ -757,75 +687,3 @@ class OutputWebhook(BaseModel):
         Optional[float], pydantic.Field(alias="loadBalanceStatsPeriodSec")
     ] = 300
     r"""How far back in time to keep traffic stats for load balancing purposes"""
-
-    @field_serializer("method")
-    def serialize_method(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("format_")
-    def serialize_format_(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookFormat(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("failed_request_logging_mode")
-    def serialize_failed_request_logging_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookFailedRequestLoggingMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_backpressure")
-    def serialize_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookBackpressureBehavior(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookAuthenticationType(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_mode")
-    def serialize_pq_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_compress")
-    def serialize_pq_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_on_backpressure")
-    def serialize_pq_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputWebhookQueueFullBehavior(value)
-            except ValueError:
-                return value
-        return value
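
Every serializer deleted above had the same shape: a pydantic `field_serializer` that coerced plain strings back into the matching enum at dump time and passed unrecognized values through untouched. The sketch below reproduces that pattern with plain pydantic v2; the `Mode` enum and `Output` model are invented stand-ins for generated classes such as `OutputWebhookMode`, not code from the SDK.

```python
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, field_serializer


class Mode(str, Enum):
    ERROR = "error"
    BACKPRESSURE = "backpressure"
    ALWAYS = "always"


class Output(BaseModel):
    # Stand-in for open-enum fields like OutputWebhook.pq_mode, which also
    # accept strings that are not members of the enum.
    pq_mode: Optional[Union[Mode, str]] = Mode.ERROR

    @field_serializer("pq_mode")
    def serialize_pq_mode(self, value):
        # Same shape as the deleted serializers: coerce known strings to the
        # enum, fall back to the raw string for unknown values.
        if isinstance(value, str):
            try:
                return Mode(value)
            except ValueError:
                return value
        return value


print(Output(pq_mode="always").model_dump())      # {'pq_mode': <Mode.ALWAYS: 'always'>}
print(Output(pq_mode="not-a-mode").model_dump())  # {'pq_mode': 'not-a-mode'}
```

In 0.3.0a1 the generated models simply drop this dump-time coercion.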

--- a/cribl_control_plane/models/outputxsiam.py
+++ b/cribl_control_plane/models/outputxsiam.py
@@ -1,12 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import models, utils
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -30,11 +29,8 @@ class OutputXsiamExtraHTTPHeader(BaseModel):
 class OutputXsiamFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
 
-    # Payload
     PAYLOAD = "payload"
-    # Payload + Headers
     PAYLOAD_AND_HEADERS = "payloadAndHeaders"
-    # None
     NONE = "none"
 
 
@@ -102,11 +98,8 @@ class OutputXsiamTimeoutRetrySettings(BaseModel):
 class OutputXsiamBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
-    # Block
     BLOCK = "block"
-    # Drop
     DROP = "drop"
-    # Persistent Queue
     QUEUE = "queue"
 
 
@@ -123,35 +116,28 @@ class OutputXsiamURL(BaseModel):
     r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""
 
 
-class OutputXsiamMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    ALWAYS = "always"
-    # Always On
-    BACKPRESSURE = "backpressure"
-
-
 class OutputXsiamCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
-    # None
     NONE = "none"
-    # Gzip
     GZIP = "gzip"
 
 
 class OutputXsiamQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    # Block
     BLOCK = "block"
-    # Drop new data
     DROP = "drop"
 
 
+class OutputXsiamMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    ERROR = "error"
+    BACKPRESSURE = "backpressure"
+    ALWAYS = "always"
+
+
 class OutputXsiamPqControlsTypedDict(TypedDict):
     pass
 
@@ -224,16 +210,6 @@ class OutputXsiamTypedDict(TypedDict):
     r"""XSIAM authentication token"""
     text_secret: NotRequired[str]
     r"""Select or create a stored text secret"""
-    pq_strict_ordering: NotRequired[bool]
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-    pq_rate_per_sec: NotRequired[float]
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-    pq_mode: NotRequired[OutputXsiamMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-    pq_max_buffer_size: NotRequired[float]
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-    pq_max_backpressure_sec: NotRequired[float]
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -244,6 +220,8 @@ class OutputXsiamTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputXsiamQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+    pq_mode: NotRequired[OutputXsiamMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputXsiamPqControlsTypedDict]
 
 
@@ -396,32 +374,6 @@ class OutputXsiam(BaseModel):
     text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
     r"""Select or create a stored text secret"""
 
-    pq_strict_ordering: Annotated[
-        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
-    ] = True
-    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
-
-    pq_rate_per_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqRatePerSec")
-    ] = 0
-    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
-
-    pq_mode: Annotated[
-        Annotated[Optional[OutputXsiamMode], PlainValidator(validate_open_enum(False))],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputXsiamMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    pq_max_buffer_size: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
-    ] = 42
-    r"""The maximum number of events to hold in memory before writing the events to disk"""
-
-    pq_max_backpressure_sec: Annotated[
-        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
-    ] = 30
-    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
-
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -452,60 +404,12 @@ class OutputXsiam(BaseModel):
     ] = OutputXsiamQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    pq_mode: Annotated[
+        Annotated[Optional[OutputXsiamMode], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputXsiamMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
     pq_controls: Annotated[
         Optional[OutputXsiamPqControls], pydantic.Field(alias="pqControls")
     ] = None
-
-    @field_serializer("failed_request_logging_mode")
-    def serialize_failed_request_logging_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamFailedRequestLoggingMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("auth_type")
-    def serialize_auth_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamAuthenticationMethod(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_backpressure")
-    def serialize_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamBackpressureBehavior(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_mode")
-    def serialize_pq_mode(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamMode(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_compress")
-    def serialize_pq_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("pq_on_backpressure")
-    def serialize_pq_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputXsiamQueueFullBehavior(value)
-            except ValueError:
-                return value
-        return value
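
The validation side of the pattern is unchanged in both files: fields such as `pq_mode` keep their `PlainValidator(validate_open_enum(False))` annotation, so strings that are not listed enum members can still pass validation. The sketch below shows roughly how an open-enum validator of that shape behaves; `open_enum_validator` is an invented stand-in written for illustration, not the SDK's `validate_open_enum`, and the `Compression`/`Queue` classes are likewise made up.

```python
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def open_enum_validator(enum_cls):
    # Invented stand-in: coerce known values to the enum and let unknown
    # strings pass through instead of raising a validation error.
    def validate(value):
        if value is None:
            return None
        try:
            return enum_cls(value)
        except ValueError:
            return value

    return validate


class Queue(BaseModel):
    pq_compress: Annotated[
        Optional[Union[Compression, str]],
        PlainValidator(open_enum_validator(Compression)),
    ] = Compression.NONE


print(Queue(pq_compress="gzip").pq_compress)  # Compression.GZIP
print(Queue(pq_compress="zstd").pq_compress)  # zstd (unknown value is kept)
```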

--- a/cribl_control_plane/models/packinfo.py
+++ b/cribl_control_plane/models/packinfo.py
@@ -8,27 +8,26 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 
 class PackInfoTagsTypedDict(TypedDict):
-    data_type:
+    data_type: List[str]
+    technology: List[str]
     domain: NotRequired[List[str]]
     streamtags: NotRequired[List[str]]
-    technology: NotRequired[List[str]]
 
 
 class PackInfoTags(BaseModel):
-    data_type: Annotated[
+    data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
+
+    technology: List[str]
 
     domain: Optional[List[str]] = None
 
     streamtags: Optional[List[str]] = None
 
-    technology: Optional[List[str]] = None
-
 
 class PackInfoTypedDict(TypedDict):
     id: str
     source: str
     author: NotRequired[str]
-    dependencies: NotRequired[Dict[str, str]]
     description: NotRequired[str]
     display_name: NotRequired[str]
     exports: NotRequired[List[str]]
@@ -49,8 +48,6 @@ class PackInfo(BaseModel):
 
     author: Optional[str] = None
 
-    dependencies: Optional[Dict[str, str]] = None
-
     description: Optional[str] = None
 
     display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None
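
In `PackInfoTags` (and the matching `PackInstallInfoTags` below), the `dataType` and `technology` lists are now required while `domain` and `streamtags` stay optional, and the `dependencies` mapping is gone from `PackInfo`. A short usage sketch against the new model, assuming 0.3.0a1 is installed; the tag values are invented:

```python
from cribl_control_plane.models.packinfo import PackInfoTags

# dataType and technology must now be supplied; domain and streamtags remain optional.
tags = PackInfoTags(dataType=["logs"], technology=["kubernetes"])

print(tags.model_dump(by_alias=True, exclude_none=True))
# {'dataType': ['logs'], 'technology': ['kubernetes']}
```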

--- a/cribl_control_plane/models/packinstallinfo.py
+++ b/cribl_control_plane/models/packinstallinfo.py
@@ -8,27 +8,26 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 
 class PackInstallInfoTagsTypedDict(TypedDict):
-    data_type:
+    data_type: List[str]
+    technology: List[str]
     domain: NotRequired[List[str]]
     streamtags: NotRequired[List[str]]
-    technology: NotRequired[List[str]]
 
 
 class PackInstallInfoTags(BaseModel):
-    data_type: Annotated[
+    data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
+
+    technology: List[str]
 
     domain: Optional[List[str]] = None
 
     streamtags: Optional[List[str]] = None
 
-    technology: Optional[List[str]] = None
-
 
 class PackInstallInfoTypedDict(TypedDict):
     id: str
     source: str
     author: NotRequired[str]
-    dependencies: NotRequired[Dict[str, str]]
     description: NotRequired[str]
     display_name: NotRequired[str]
     exports: NotRequired[List[str]]
@@ -50,8 +49,6 @@ class PackInstallInfo(BaseModel):
 
     author: Optional[str] = None
 
-    dependencies: Optional[Dict[str, str]] = None
-
     description: Optional[str] = None
 
     display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None

--- a/cribl_control_plane/models/resourcepolicy.py
+++ b/cribl_control_plane/models/resourcepolicy.py
@@ -2,10 +2,8 @@
 
 from __future__ import annotations
 from .rbacresource import RbacResource
-from cribl_control_plane import models
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
-from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -26,12 +24,3 @@ class ResourcePolicy(BaseModel):
     type: Annotated[RbacResource, PlainValidator(validate_open_enum(False))]
 
     id: Optional[str] = None
-
-    @field_serializer("type")
-    def serialize_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.RbacResource(value)
-            except ValueError:
-                return value
-        return value

--- a/cribl_control_plane/models/uploadpackresponse.py
+++ b/cribl_control_plane/models/routecloneconf.py
@@ -5,9 +5,9 @@ from cribl_control_plane.types import BaseModel
 from typing_extensions import TypedDict
 
 
-class
-
+class RouteCloneConfTypedDict(TypedDict):
+    pass
 
 
-class
-
+class RouteCloneConf(BaseModel):
+    pass

--- a/cribl_control_plane/models/routeconf.py
+++ b/cribl_control_plane/models/routeconf.py
@@ -1,9 +1,10 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
+from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
-from typing import
+from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
@@ -12,7 +13,7 @@ class RouteConfTypedDict(TypedDict):
     id: str
     name: str
     pipeline: str
-    clones: NotRequired[List[
+    clones: NotRequired[List[RouteCloneConfTypedDict]]
     context: NotRequired[str]
     description: NotRequired[str]
     disabled: NotRequired[bool]
@@ -32,7 +33,7 @@ class RouteConf(BaseModel):
 
     pipeline: str
 
-    clones: Optional[List[
+    clones: Optional[List[RouteCloneConf]] = None
 
     context: Optional[str] = None
 