cribl-control-plane 0.0.16__py3-none-any.whl → 0.0.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/errors/healthstatus_error.py +2 -8
- cribl_control_plane/models/__init__.py +4365 -4124
- cribl_control_plane/models/createinputop.py +1734 -2771
- cribl_control_plane/models/createoutputop.py +2153 -4314
- cribl_control_plane/models/createversioncommitop.py +24 -0
- cribl_control_plane/models/createversionpushop.py +23 -0
- cribl_control_plane/models/createversionrevertop.py +47 -0
- cribl_control_plane/models/createversionsyncop.py +23 -0
- cribl_control_plane/models/createversionundoop.py +37 -0
- cribl_control_plane/models/getversionbranchop.py +23 -0
- cribl_control_plane/models/getversioncountop.py +47 -0
- cribl_control_plane/models/getversioncurrentbranchop.py +23 -0
- cribl_control_plane/models/getversiondiffop.py +63 -0
- cribl_control_plane/models/getversionfilesop.py +48 -0
- cribl_control_plane/models/getversioninfoop.py +24 -0
- cribl_control_plane/models/getversionshowop.py +63 -0
- cribl_control_plane/models/getversionstatusop.py +38 -0
- cribl_control_plane/models/gitcommitparams.py +23 -0
- cribl_control_plane/models/gitcommitsummary.py +68 -0
- cribl_control_plane/models/gitfile.py +20 -0
- cribl_control_plane/models/gitfilesresponse.py +22 -0
- cribl_control_plane/models/gitinfo.py +23 -0
- cribl_control_plane/models/gitrevertparams.py +20 -0
- cribl_control_plane/models/gitrevertresult.py +48 -0
- cribl_control_plane/models/gitstatusresult.py +73 -0
- cribl_control_plane/models/healthstatus.py +4 -7
- cribl_control_plane/models/inputappscope.py +16 -36
- cribl_control_plane/models/inputazureblob.py +8 -19
- cribl_control_plane/models/inputcollection.py +6 -15
- cribl_control_plane/models/inputconfluentcloud.py +20 -45
- cribl_control_plane/models/inputcribl.py +6 -13
- cribl_control_plane/models/inputcriblhttp.py +10 -27
- cribl_control_plane/models/inputcribllakehttp.py +12 -26
- cribl_control_plane/models/inputcriblmetrics.py +6 -14
- cribl_control_plane/models/inputcribltcp.py +10 -27
- cribl_control_plane/models/inputcrowdstrike.py +12 -28
- cribl_control_plane/models/inputdatadogagent.py +10 -28
- cribl_control_plane/models/inputdatagen.py +6 -13
- cribl_control_plane/models/inputedgeprometheus.py +31 -64
- cribl_control_plane/models/inputelastic.py +16 -44
- cribl_control_plane/models/inputeventhub.py +8 -19
- cribl_control_plane/models/inputexec.py +8 -16
- cribl_control_plane/models/inputfile.py +8 -17
- cribl_control_plane/models/inputfirehose.py +10 -27
- cribl_control_plane/models/inputgooglepubsub.py +8 -23
- cribl_control_plane/models/inputgrafana_union.py +35 -81
- cribl_control_plane/models/inputhttp.py +10 -27
- cribl_control_plane/models/inputhttpraw.py +10 -27
- cribl_control_plane/models/inputjournalfiles.py +6 -16
- cribl_control_plane/models/inputkafka.py +16 -45
- cribl_control_plane/models/inputkinesis.py +16 -42
- cribl_control_plane/models/inputkubeevents.py +6 -13
- cribl_control_plane/models/inputkubelogs.py +10 -18
- cribl_control_plane/models/inputkubemetrics.py +10 -18
- cribl_control_plane/models/inputloki.py +12 -33
- cribl_control_plane/models/inputmetrics.py +10 -25
- cribl_control_plane/models/inputmodeldriventelemetry.py +12 -32
- cribl_control_plane/models/inputmsk.py +18 -52
- cribl_control_plane/models/inputnetflow.py +6 -15
- cribl_control_plane/models/inputoffice365mgmt.py +16 -37
- cribl_control_plane/models/inputoffice365msgtrace.py +18 -39
- cribl_control_plane/models/inputoffice365service.py +18 -39
- cribl_control_plane/models/inputopentelemetry.py +18 -42
- cribl_control_plane/models/inputprometheus.py +20 -54
- cribl_control_plane/models/inputprometheusrw.py +12 -34
- cribl_control_plane/models/inputrawudp.py +6 -15
- cribl_control_plane/models/inputs3.py +10 -23
- cribl_control_plane/models/inputs3inventory.py +12 -28
- cribl_control_plane/models/inputsecuritylake.py +12 -29
- cribl_control_plane/models/inputsnmp.py +8 -20
- cribl_control_plane/models/inputsplunk.py +14 -37
- cribl_control_plane/models/inputsplunkhec.py +12 -33
- cribl_control_plane/models/inputsplunksearch.py +16 -37
- cribl_control_plane/models/inputsqs.py +12 -31
- cribl_control_plane/models/inputsyslog_union.py +29 -53
- cribl_control_plane/models/inputsystemmetrics.py +26 -50
- cribl_control_plane/models/inputsystemstate.py +10 -18
- cribl_control_plane/models/inputtcp.py +12 -33
- cribl_control_plane/models/inputtcpjson.py +12 -33
- cribl_control_plane/models/inputwef.py +20 -45
- cribl_control_plane/models/inputwindowsmetrics.py +26 -46
- cribl_control_plane/models/inputwineventlogs.py +12 -22
- cribl_control_plane/models/inputwiz.py +10 -25
- cribl_control_plane/models/inputzscalerhec.py +12 -33
- cribl_control_plane/models/output.py +3 -6
- cribl_control_plane/models/outputazureblob.py +20 -52
- cribl_control_plane/models/outputazuredataexplorer.py +30 -77
- cribl_control_plane/models/outputazureeventhub.py +20 -44
- cribl_control_plane/models/outputazurelogs.py +14 -37
- cribl_control_plane/models/outputclickhouse.py +22 -59
- cribl_control_plane/models/outputcloudwatch.py +12 -33
- cribl_control_plane/models/outputconfluentcloud.py +32 -75
- cribl_control_plane/models/outputcriblhttp.py +18 -46
- cribl_control_plane/models/outputcribllake.py +18 -48
- cribl_control_plane/models/outputcribltcp.py +20 -47
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +16 -54
- cribl_control_plane/models/outputdatadog.py +22 -50
- cribl_control_plane/models/outputdataset.py +20 -48
- cribl_control_plane/models/outputdefault.py +2 -5
- cribl_control_plane/models/outputdevnull.py +2 -5
- cribl_control_plane/models/outputdiskspool.py +4 -9
- cribl_control_plane/models/outputdls3.py +26 -72
- cribl_control_plane/models/outputdynatracehttp.py +22 -57
- cribl_control_plane/models/outputdynatraceotlp.py +24 -59
- cribl_control_plane/models/outputelastic.py +20 -45
- cribl_control_plane/models/outputelasticcloud.py +14 -40
- cribl_control_plane/models/outputexabeam.py +12 -33
- cribl_control_plane/models/outputfilesystem.py +16 -41
- cribl_control_plane/models/outputgooglechronicle.py +18 -54
- cribl_control_plane/models/outputgooglecloudlogging.py +16 -46
- cribl_control_plane/models/outputgooglecloudstorage.py +26 -71
- cribl_control_plane/models/outputgooglepubsub.py +16 -39
- cribl_control_plane/models/{outputgrafanacloud_union.py → outputgrafanacloud.py} +49 -110
- cribl_control_plane/models/outputgraphite.py +16 -35
- cribl_control_plane/models/outputhoneycomb.py +14 -37
- cribl_control_plane/models/outputhumiohec.py +18 -47
- cribl_control_plane/models/outputinfluxdb.py +18 -44
- cribl_control_plane/models/outputkafka.py +28 -73
- cribl_control_plane/models/outputkinesis.py +18 -44
- cribl_control_plane/models/outputloki.py +18 -43
- cribl_control_plane/models/outputminio.py +26 -69
- cribl_control_plane/models/outputmsk.py +30 -81
- cribl_control_plane/models/outputnetflow.py +2 -5
- cribl_control_plane/models/outputnewrelic.py +20 -45
- cribl_control_plane/models/outputnewrelicevents.py +16 -45
- cribl_control_plane/models/outputopentelemetry.py +28 -69
- cribl_control_plane/models/outputprometheus.py +14 -37
- cribl_control_plane/models/outputring.py +10 -21
- cribl_control_plane/models/outputrouter.py +2 -5
- cribl_control_plane/models/outputs3.py +28 -72
- cribl_control_plane/models/outputsecuritylake.py +20 -56
- cribl_control_plane/models/outputsentinel.py +20 -49
- cribl_control_plane/models/outputsentineloneaisiem.py +20 -54
- cribl_control_plane/models/outputservicenow.py +26 -64
- cribl_control_plane/models/outputsignalfx.py +16 -39
- cribl_control_plane/models/outputsnmp.py +2 -5
- cribl_control_plane/models/outputsns.py +16 -40
- cribl_control_plane/models/outputsplunk.py +26 -64
- cribl_control_plane/models/outputsplunkhec.py +14 -37
- cribl_control_plane/models/outputsplunklb.py +36 -83
- cribl_control_plane/models/outputsqs.py +18 -45
- cribl_control_plane/models/outputstatsd.py +16 -34
- cribl_control_plane/models/outputstatsdext.py +14 -33
- cribl_control_plane/models/outputsumologic.py +14 -37
- cribl_control_plane/models/outputsyslog.py +26 -60
- cribl_control_plane/models/outputtcpjson.py +22 -54
- cribl_control_plane/models/outputwavefront.py +14 -37
- cribl_control_plane/models/outputwebhook.py +24 -60
- cribl_control_plane/models/outputxsiam.py +16 -37
- cribl_control_plane/sdk.py +4 -0
- cribl_control_plane/versioning.py +2309 -0
- {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/METADATA +18 -2
- cribl_control_plane-0.0.18.dist-info/RECORD +237 -0
- cribl_control_plane-0.0.16.dist-info/RECORD +0 -215
- {cribl_control_plane-0.0.16.dist-info → cribl_control_plane-0.0.18.dist-info}/WHEEL +0 -0
cribl_control_plane/models/gitrevertresult.py (new file)
@@ -0,0 +1,48 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing import List, Optional
+from typing_extensions import NotRequired, TypedDict
+
+
+class GitRevertResultFilesTypedDict(TypedDict):
+    created: NotRequired[List[str]]
+    deleted: NotRequired[List[str]]
+    modified: NotRequired[List[str]]
+    renamed: NotRequired[List[str]]
+
+
+class GitRevertResultFiles(BaseModel):
+    created: Optional[List[str]] = None
+
+    deleted: Optional[List[str]] = None
+
+    modified: Optional[List[str]] = None
+
+    renamed: Optional[List[str]] = None
+
+
+class AuditTypedDict(TypedDict):
+    files: GitRevertResultFilesTypedDict
+    group: str
+    id: str
+
+
+class Audit(BaseModel):
+    files: GitRevertResultFiles
+
+    group: str
+
+    id: str
+
+
+class GitRevertResultTypedDict(TypedDict):
+    audit: AuditTypedDict
+    reverted: bool
+
+
+class GitRevertResult(BaseModel):
+    audit: Audit
+
+    reverted: bool
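The revert result is a plain data model. Below is a minimal sketch of validating a response payload into it, assuming `cribl_control_plane.types.BaseModel` behaves like a standard pydantic v2 model; the payload shape is illustrative, not taken from the API:

    from cribl_control_plane.models.gitrevertresult import GitRevertResult

    # Illustrative payload; keys mirror the generated model fields above.
    payload = {
        "reverted": True,
        "audit": {
            "id": "abc123",      # hypothetical commit id
            "group": "default",  # hypothetical worker group
            "files": {"modified": ["pipelines/main.yml"]},
        },
    }

    result = GitRevertResult.model_validate(payload)
    print(result.reverted, result.audit.files.modified)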
cribl_control_plane/models/gitstatusresult.py (new file)
@@ -0,0 +1,73 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+import pydantic
+from typing import List
+from typing_extensions import Annotated, TypedDict
+
+
+class FileTypedDict(TypedDict):
+    index: str
+    path: str
+    working_dir: str
+
+
+class File(BaseModel):
+    index: str
+
+    path: str
+
+    working_dir: str
+
+
+class RenamedTypedDict(TypedDict):
+    from_: str
+    to: str
+
+
+class Renamed(BaseModel):
+    from_: Annotated[str, pydantic.Field(alias="from")]
+
+    to: str
+
+
+class GitStatusResultTypedDict(TypedDict):
+    ahead: float
+    behind: float
+    conflicted: List[str]
+    created: List[str]
+    current: str
+    deleted: List[str]
+    files: List[FileTypedDict]
+    modified: List[str]
+    not_added: List[str]
+    renamed: List[RenamedTypedDict]
+    staged: List[str]
+    tracking: str
+
+
+class GitStatusResult(BaseModel):
+    ahead: float
+
+    behind: float
+
+    conflicted: List[str]
+
+    created: List[str]
+
+    current: str
+
+    deleted: List[str]
+
+    files: List[File]
+
+    modified: List[str]
+
+    not_added: List[str]
+
+    renamed: List[Renamed]
+
+    staged: List[str]
+
+    tracking: str
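Note the `from_` field on `Renamed`: `from` is a Python keyword, so the generated model exposes it under a pydantic alias. A small round-trip sketch, assuming standard pydantic v2 behavior for the shared `BaseModel`:

    from cribl_control_plane.models.gitstatusresult import Renamed

    # The wire key is "from"; the Python attribute is from_.
    item = Renamed.model_validate({"from": "old/path.yml", "to": "new/path.yml"})
    assert item.from_ == "old/path.yml"

    # Dumping by alias restores the original key name.
    assert item.model_dump(by_alias=True) == {"from": "old/path.yml", "to": "new/path.yml"}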
cribl_control_plane/models/healthstatus.py
@@ -1,22 +1,19 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class Role(str, Enum, metaclass=utils.OpenEnumMeta):
+class Role(str, Enum):
     PRIMARY = "primary"
     STANDBY = "standby"
 
 
-class Status(str, Enum, metaclass=utils.OpenEnumMeta):
+class Status(str, Enum):
     HEALTHY = "healthy"
     SHUTTING_DOWN = "shutting down"
     STANDBY = "standby"
@@ -29,8 +26,8 @@ class HealthStatusTypedDict(TypedDict):
 
 
 class HealthStatus(BaseModel):
-    status: Annotated[Status, PlainValidator(validate_open_enum(False))]
+    status: Status
 
     start_time: Annotated[float, pydantic.Field(alias="startTime")]
 
-    role: Annotated[Optional[Role], PlainValidator(validate_open_enum(False))] = None
+    role: Optional[Role] = None
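The dropped `utils.OpenEnumMeta` metaclass and `validate_open_enum` validator made these open enums, which typically let unknown values pass through; as closed `str` enums, values outside the declared members should now be rejected at validation time. A hedged sketch of the difference, assuming standard pydantic v2 validation ("degraded" is a made-up value, not one the API documents):

    import pydantic
    from cribl_control_plane.models.healthstatus import HealthStatus, Status

    ok = HealthStatus.model_validate({"status": "healthy", "startTime": 1700000000.0})
    assert ok.status is Status.HEALTHY

    try:
        # "degraded" is not a declared member, so this should now raise.
        HealthStatus.model_validate({"status": "degraded", "startTime": 1700000000.0})
    except pydantic.ValidationError as err:
        print("unknown status rejected:", err.error_count(), "error(s)")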
cribl_control_plane/models/inputappscope.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import Any, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputAppscopeType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeType(str, Enum):
     APPSCOPE = "appscope"
 
 
@@ -26,14 +23,14 @@ class InputAppscopeConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputAppscopeMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputAppscopeCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputAppscopePqTypedDict(TypedDict):
 
 
 class InputAppscopePq(BaseModel):
-    mode: Annotated[
-        Optional[InputAppscopeMode], PlainValidator(validate_open_enum(False))
-    ] = InputAppscopeMode.ALWAYS
+    mode: Optional[InputAppscopeMode] = InputAppscopeMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputAppscopePq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputAppscopeCompression], PlainValidator(validate_open_enum(False))
-    ] = InputAppscopeCompression.NONE
+    compress: Optional[InputAppscopeCompression] = InputAppscopeCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -138,7 +131,7 @@ class InputAppscopeFilter(BaseModel):
     r"""To override the UNIX domain socket or address/port specified in General Settings (while leaving Authentication settings as is), enter a URL."""
 
 
-class InputAppscopeDataCompressionFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeDataCompressionFormat(str, Enum):
     NONE = "none"
     GZIP = "gzip"
 
@@ -170,10 +163,9 @@ class InputAppscopePersistence(BaseModel):
     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
     r"""Maximum amount of time to retain data (examples: 2h, 4d). When limit is reached, older data will be deleted."""
 
-    compress: Annotated[
-        Optional[InputAppscopeDataCompressionFormat],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputAppscopeDataCompressionFormat.GZIP
+    compress: Optional[InputAppscopeDataCompressionFormat] = (
+        InputAppscopeDataCompressionFormat.GZIP
+    )
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = (
         "$CRIBL_HOME/state/appscope"
@@ -181,21 +173,21 @@ class InputAppscopePersistence(BaseModel):
     r"""Path to use to write metrics. Defaults to $CRIBL_HOME/state/appscope"""
 
 
-class InputAppscopeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
     MANUAL = "manual"
     SECRET = "secret"
 
 
-class InputAppscopeMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputAppscopeMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAppscopeMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -254,19 +246,11 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
     ] = None
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputAppscopeMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="minVersion"),
+        Optional[InputAppscopeMinimumTLSVersion], pydantic.Field(alias="minVersion")
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputAppscopeMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="maxVersion"),
+        Optional[InputAppscopeMaximumTLSVersion], pydantic.Field(alias="maxVersion")
    ] = None
 
 
@@ -332,7 +316,7 @@ class InputAppscope(BaseModel):
     id: str
     r"""Unique ID for this input"""
 
-    type: Annotated[InputAppscopeType, PlainValidator(validate_open_enum(False))]
+    type: InputAppscopeType
 
     disabled: Optional[bool] = False
 
@@ -413,11 +397,7 @@ class InputAppscope(BaseModel):
     persistence: Optional[InputAppscopePersistence] = None
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputAppscopeAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputAppscopeAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputAppscopeAuthenticationMethod.MANUAL
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
 
cribl_control_plane/models/inputazureblob.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputAzureBlobType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobType(str, Enum):
     AZURE_BLOB = "azure_blob"
 
 
@@ -26,14 +23,14 @@ class InputAzureBlobConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputAzureBlobPqTypedDict(TypedDict):
 
 
 class InputAzureBlobPq(BaseModel):
-    mode: Annotated[
-        Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
-    ] = InputAzureBlobMode.ALWAYS
+    mode: Optional[InputAzureBlobMode] = InputAzureBlobMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputAzureBlobPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
-    ] = InputAzureBlobCompression.NONE
+    compress: Optional[InputAzureBlobCompression] = InputAzureBlobCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -103,7 +96,7 @@ class InputAzureBlobMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputAzureBlobAuthenticationMethod(str, Enum):
     MANUAL = "manual"
     SECRET = "secret"
     CLIENT_SECRET = "clientSecret"
@@ -184,7 +177,7 @@ class InputAzureBlobTypedDict(TypedDict):
 
 
 class InputAzureBlob(BaseModel):
-    type: Annotated[InputAzureBlobType, PlainValidator(validate_open_enum(False))]
+    type: InputAzureBlobType
 
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The storage account queue name blob notifications will be read from. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myQueue-${C.vars.myVar}`"""
@@ -264,11 +257,7 @@ class InputAzureBlob(BaseModel):
     r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""
 
     auth_type: Annotated[
-        Annotated[
-            Optional[InputAzureBlobAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="authType"),
+        Optional[InputAzureBlobAuthenticationMethod], pydantic.Field(alias="authType")
     ] = InputAzureBlobAuthenticationMethod.MANUAL
 
     description: Optional[str] = None
cribl_control_plane/models/inputcollection.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputCollectionType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCollectionType(str, Enum):
     COLLECTION = "collection"
 
 
@@ -26,14 +23,14 @@ class InputCollectionConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCollectionMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputCollectionCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputCollectionPqTypedDict(TypedDict):
 
 
 class InputCollectionPq(BaseModel):
-    mode: Annotated[
-        Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
-    ] = InputCollectionMode.ALWAYS
+    mode: Optional[InputCollectionMode] = InputCollectionMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,9 +79,7 @@ class InputCollectionPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
-    ] = InputCollectionCompression.NONE
+    compress: Optional[InputCollectionCompression] = InputCollectionCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
 
@@ -156,9 +149,7 @@ class InputCollection(BaseModel):
     id: str
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputCollectionType], PlainValidator(validate_open_enum(False))
-    ] = InputCollectionType.COLLECTION
+    type: Optional[InputCollectionType] = InputCollectionType.COLLECTION
 
     disabled: Optional[bool] = False
 
cribl_control_plane/models/inputconfluentcloud.py
@@ -1,17 +1,14 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
-from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class InputConfluentCloudType(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudType(str, Enum):
     CONFLUENT_CLOUD = "confluent_cloud"
 
 
@@ -26,14 +23,14 @@ class InputConfluentCloudConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputConfluentCloudMode(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMode(str, Enum):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     SMART = "smart"
     ALWAYS = "always"
 
 
-class InputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudCompression(str, Enum):
     r"""Codec to use to compress the persisted data"""
 
     NONE = "none"
@@ -58,9 +55,7 @@ class InputConfluentCloudPqTypedDict(TypedDict):
 
 
 class InputConfluentCloudPq(BaseModel):
-    mode: Annotated[
-        Optional[InputConfluentCloudMode], PlainValidator(validate_open_enum(False))
-    ] = InputConfluentCloudMode.ALWAYS
+    mode: Optional[InputConfluentCloudMode] = InputConfluentCloudMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -84,21 +79,20 @@ class InputConfluentCloudPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Annotated[
-        Optional[InputConfluentCloudCompression],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputConfluentCloudCompression.NONE
+    compress: Optional[InputConfluentCloudCompression] = (
+        InputConfluentCloudCompression.NONE
+    )
     r"""Codec to use to compress the persisted data"""
 
 
-class InputConfluentCloudMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputConfluentCloudMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -158,18 +152,12 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
@@ -193,18 +181,14 @@ class InputConfluentCloudAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(
-    str, Enum, metaclass=utils.OpenEnumMeta
-):
+class InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -264,18 +248,12 @@ class InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudKafkaSchemaRegistryMinimumTLSVersion],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
+        Optional[InputConfluentCloudKafkaSchemaRegistryMaximumTLSVersion],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
@@ -324,7 +302,7 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputConfluentCloudKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputConfluentCloudSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+class InputConfluentCloudSASLMechanism(str, Enum):
     PLAIN = "plain"
     SCRAM_SHA_256 = "scram-sha-256"
     SCRAM_SHA_512 = "scram-sha-512"
@@ -343,10 +321,9 @@ class InputConfluentCloudAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Annotated[
-        Optional[InputConfluentCloudSASLMechanism],
-        PlainValidator(validate_open_enum(False)),
-    ] = InputConfluentCloudSASLMechanism.PLAIN
+    mechanism: Optional[InputConfluentCloudSASLMechanism] = (
+        InputConfluentCloudSASLMechanism.PLAIN
+    )
 
 
 class InputConfluentCloudMetadatumTypedDict(TypedDict):
@@ -452,9 +429,7 @@ class InputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Annotated[
-        Optional[InputConfluentCloudType], PlainValidator(validate_open_enum(False))
-    ] = None
+    type: Optional[InputConfluentCloudType] = None
 
     disabled: Optional[bool] = False
 
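The remaining input and output model diffs in this release appear to follow the same template as the files shown above: the nested `Annotated[..., PlainValidator(validate_open_enum(False))]` wrappers disappear, fields become plain enum annotations, and the defaults are unchanged. A quick spot-check sketch against the installed package, assuming pydantic v2's `model_fields` introspection; the asserted defaults mirror the inputappscope diff above:

    from cribl_control_plane.models.inputappscope import (
        InputAppscopeCompression,
        InputAppscopeMode,
        InputAppscopePq,
    )

    fields = InputAppscopePq.model_fields
    assert fields["mode"].default is InputAppscopeMode.ALWAYS
    assert fields["compress"].default is InputAppscopeCompression.NONE

    # Plain str enums: members compare equal to their raw string values.
    assert InputAppscopeMode.ALWAYS == "always"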