cribl-control-plane 0.2.0rc1-py3-none-any.whl → 0.2.1rc1-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +4 -6
- cribl_control_plane/groups_sdk.py +11 -8
- cribl_control_plane/mappings.py +1185 -0
- cribl_control_plane/models/__init__.py +195 -33
- cribl_control_plane/models/authtoken.py +8 -1
- cribl_control_plane/models/configgroup.py +30 -2
- cribl_control_plane/models/createadminproductsmappingsactivatebyproductop.py +52 -0
- cribl_control_plane/models/createadminproductsmappingsbyproductop.py +53 -0
- cribl_control_plane/models/createversionundoop.py +3 -3
- cribl_control_plane/models/deleteadminproductsmappingsbyproductandidop.py +51 -0
- cribl_control_plane/models/distributedsummary.py +6 -0
- cribl_control_plane/models/getadminproductsmappingsbyproductandidop.py +51 -0
- cribl_control_plane/models/getadminproductsmappingsbyproductop.py +44 -0
- cribl_control_plane/models/hbcriblinfo.py +14 -3
- cribl_control_plane/models/heartbeatmetadata.py +3 -0
- cribl_control_plane/models/input.py +65 -63
- cribl_control_plane/models/inputappscope.py +4 -0
- cribl_control_plane/models/inputazureblob.py +4 -0
- cribl_control_plane/models/inputcollection.py +4 -0
- cribl_control_plane/models/inputconfluentcloud.py +8 -18
- cribl_control_plane/models/inputcribl.py +4 -0
- cribl_control_plane/models/inputcriblhttp.py +4 -0
- cribl_control_plane/models/inputcribllakehttp.py +4 -0
- cribl_control_plane/models/inputcriblmetrics.py +4 -0
- cribl_control_plane/models/inputcribltcp.py +4 -0
- cribl_control_plane/models/inputcrowdstrike.py +7 -0
- cribl_control_plane/models/inputdatadogagent.py +4 -0
- cribl_control_plane/models/inputdatagen.py +4 -0
- cribl_control_plane/models/inputedgeprometheus.py +12 -0
- cribl_control_plane/models/inputelastic.py +11 -0
- cribl_control_plane/models/inputeventhub.py +6 -0
- cribl_control_plane/models/inputexec.py +4 -0
- cribl_control_plane/models/inputfile.py +6 -0
- cribl_control_plane/models/inputfirehose.py +4 -0
- cribl_control_plane/models/inputgooglepubsub.py +7 -0
- cribl_control_plane/models/inputgrafana.py +8 -0
- cribl_control_plane/models/inputhttp.py +4 -0
- cribl_control_plane/models/inputhttpraw.py +4 -0
- cribl_control_plane/models/inputjournalfiles.py +4 -0
- cribl_control_plane/models/inputkafka.py +8 -17
- cribl_control_plane/models/inputkinesis.py +15 -0
- cribl_control_plane/models/inputkubeevents.py +4 -0
- cribl_control_plane/models/inputkubelogs.py +4 -0
- cribl_control_plane/models/inputkubemetrics.py +4 -0
- cribl_control_plane/models/inputloki.py +4 -0
- cribl_control_plane/models/inputmetrics.py +4 -0
- cribl_control_plane/models/inputmodeldriventelemetry.py +4 -0
- cribl_control_plane/models/inputmsk.py +7 -17
- cribl_control_plane/models/inputnetflow.py +4 -0
- cribl_control_plane/models/inputoffice365mgmt.py +11 -0
- cribl_control_plane/models/inputoffice365msgtrace.py +11 -0
- cribl_control_plane/models/inputoffice365service.py +11 -0
- cribl_control_plane/models/inputopentelemetry.py +8 -0
- cribl_control_plane/models/inputprometheus.py +10 -0
- cribl_control_plane/models/inputprometheusrw.py +4 -0
- cribl_control_plane/models/inputrawudp.py +4 -0
- cribl_control_plane/models/inputs3.py +7 -0
- cribl_control_plane/models/inputs3inventory.py +7 -0
- cribl_control_plane/models/inputsecuritylake.py +7 -0
- cribl_control_plane/models/inputsnmp.py +11 -0
- cribl_control_plane/models/inputsplunk.py +9 -0
- cribl_control_plane/models/inputsplunkhec.py +4 -0
- cribl_control_plane/models/inputsplunksearch.py +7 -0
- cribl_control_plane/models/inputsqs.py +9 -0
- cribl_control_plane/models/inputsyslog.py +8 -0
- cribl_control_plane/models/inputsystemmetrics.py +32 -0
- cribl_control_plane/models/inputsystemstate.py +4 -0
- cribl_control_plane/models/inputtcp.py +4 -0
- cribl_control_plane/models/inputtcpjson.py +4 -0
- cribl_control_plane/models/inputwef.py +6 -0
- cribl_control_plane/models/inputwindowsmetrics.py +28 -0
- cribl_control_plane/models/inputwineventlogs.py +8 -0
- cribl_control_plane/models/inputwiz.py +7 -0
- cribl_control_plane/models/inputwizwebhook.py +4 -0
- cribl_control_plane/models/inputzscalerhec.py +4 -0
- cribl_control_plane/models/jobinfo.py +4 -1
- cribl_control_plane/models/mappingruleset.py +95 -0
- cribl_control_plane/models/nodeprovidedinfo.py +11 -1
- cribl_control_plane/models/outpostnodeinfo.py +16 -0
- cribl_control_plane/models/output.py +77 -72
- cribl_control_plane/models/outputazureblob.py +20 -0
- cribl_control_plane/models/outputazuredataexplorer.py +28 -0
- cribl_control_plane/models/outputazureeventhub.py +17 -0
- cribl_control_plane/models/outputazurelogs.py +13 -0
- cribl_control_plane/models/outputchronicle.py +13 -0
- cribl_control_plane/models/outputclickhouse.py +17 -0
- cribl_control_plane/models/outputcloudwatch.py +13 -0
- cribl_control_plane/models/outputconfluentcloud.py +24 -18
- cribl_control_plane/models/outputcriblhttp.py +15 -0
- cribl_control_plane/models/outputcribllake.py +21 -0
- cribl_control_plane/models/outputcribltcp.py +12 -0
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +15 -0
- cribl_control_plane/models/outputdatabricks.py +411 -0
- cribl_control_plane/models/outputdatadog.py +30 -0
- cribl_control_plane/models/outputdataset.py +23 -0
- cribl_control_plane/models/outputdls3.py +35 -0
- cribl_control_plane/models/outputdynatracehttp.py +22 -0
- cribl_control_plane/models/outputdynatraceotlp.py +22 -0
- cribl_control_plane/models/outputelastic.py +18 -0
- cribl_control_plane/models/outputelasticcloud.py +13 -0
- cribl_control_plane/models/outputexabeam.py +14 -0
- cribl_control_plane/models/outputfilesystem.py +15 -0
- cribl_control_plane/models/outputgooglechronicle.py +21 -0
- cribl_control_plane/models/outputgooglecloudlogging.py +19 -0
- cribl_control_plane/models/outputgooglecloudstorage.py +28 -0
- cribl_control_plane/models/outputgooglepubsub.py +13 -0
- cribl_control_plane/models/outputgrafanacloud.py +50 -0
- cribl_control_plane/models/outputgraphite.py +12 -0
- cribl_control_plane/models/outputhoneycomb.py +13 -0
- cribl_control_plane/models/outputhumiohec.py +15 -0
- cribl_control_plane/models/outputinfluxdb.py +19 -0
- cribl_control_plane/models/outputkafka.py +24 -17
- cribl_control_plane/models/outputkinesis.py +15 -0
- cribl_control_plane/models/outputloki.py +20 -0
- cribl_control_plane/models/outputminio.py +28 -0
- cribl_control_plane/models/outputmsk.py +23 -17
- cribl_control_plane/models/outputnewrelic.py +16 -0
- cribl_control_plane/models/outputnewrelicevents.py +16 -0
- cribl_control_plane/models/outputopentelemetry.py +22 -0
- cribl_control_plane/models/outputprometheus.py +13 -0
- cribl_control_plane/models/outputring.py +2 -0
- cribl_control_plane/models/outputs3.py +35 -0
- cribl_control_plane/models/outputsecuritylake.py +29 -0
- cribl_control_plane/models/outputsentinel.py +15 -0
- cribl_control_plane/models/outputsentineloneaisiem.py +13 -0
- cribl_control_plane/models/outputservicenow.py +21 -0
- cribl_control_plane/models/outputsignalfx.py +13 -0
- cribl_control_plane/models/outputsns.py +13 -0
- cribl_control_plane/models/outputsplunk.py +15 -0
- cribl_control_plane/models/outputsplunkhec.py +13 -0
- cribl_control_plane/models/outputsplunklb.py +15 -0
- cribl_control_plane/models/outputsqs.py +15 -0
- cribl_control_plane/models/outputstatsd.py +12 -0
- cribl_control_plane/models/outputstatsdext.py +12 -0
- cribl_control_plane/models/outputsumologic.py +15 -0
- cribl_control_plane/models/outputsyslog.py +24 -0
- cribl_control_plane/models/outputtcpjson.py +12 -0
- cribl_control_plane/models/outputwavefront.py +13 -0
- cribl_control_plane/models/outputwebhook.py +23 -0
- cribl_control_plane/models/outputxsiam.py +13 -0
- cribl_control_plane/models/packinfo.py +8 -5
- cribl_control_plane/models/packinstallinfo.py +8 -5
- cribl_control_plane/models/pipeline.py +4 -4
- cribl_control_plane/models/routeconf.py +3 -4
- cribl_control_plane/models/{routecloneconf.py → rulesetid.py} +4 -4
- cribl_control_plane/models/runnablejobcollection.py +4 -0
- cribl_control_plane/models/updateadminproductsmappingsbyproductandidop.py +63 -0
- cribl_control_plane/models/updatepacksop.py +25 -0
- cribl_control_plane/models/uploadpackresponse.py +13 -0
- cribl_control_plane/packs.py +202 -7
- cribl_control_plane/pipelines.py +8 -8
- {cribl_control_plane-0.2.0rc1.dist-info → cribl_control_plane-0.2.1rc1.dist-info}/METADATA +44 -3
- {cribl_control_plane-0.2.0rc1.dist-info → cribl_control_plane-0.2.1rc1.dist-info}/RECORD +154 -143
- cribl_control_plane/models/appmode.py +0 -14
- {cribl_control_plane-0.2.0rc1.dist-info → cribl_control_plane-0.2.1rc1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputxsiam.py
CHANGED

@@ -29,8 +29,11 @@ class OutputXsiamExtraHTTPHeader(BaseModel):
 class OutputXsiamFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""

+    # Payload
     PAYLOAD = "payload"
+    # Payload + Headers
     PAYLOAD_AND_HEADERS = "payloadAndHeaders"
+    # None
     NONE = "none"


@@ -98,8 +101,11 @@ class OutputXsiamTimeoutRetrySettings(BaseModel):
 class OutputXsiamBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""

+    # Block
     BLOCK = "block"
+    # Drop
     DROP = "drop"
+    # Persistent Queue
     QUEUE = "queue"


@@ -119,22 +125,29 @@ class OutputXsiamURL(BaseModel):
 class OutputXsiamCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""

+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"


 class OutputXsiamQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""

+    # Block
     BLOCK = "block"
+    # Drop new data
     DROP = "drop"


 class OutputXsiamMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""

+    # Error
     ERROR = "error"
+    # Backpressure
     BACKPRESSURE = "backpressure"
+    # Always On
     ALWAYS = "always"
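
The hunks above only add inline comments that map the UI labels ("Payload + Headers", "Persistent Queue", "Always On", and so on) onto the existing enum values. A minimal usage sketch, assuming these enums are re-exported from cribl_control_plane.models like the SDK's other generated types:

    from cribl_control_plane import models

    # Pick the labelled values when building an XSIAM destination config:
    # "Persistent Queue" backpressure behavior and "Always On" PQ mode.
    backpressure = models.OutputXsiamBackpressureBehavior.QUEUE  # "queue"
    pq_mode = models.OutputXsiamMode.ALWAYS                      # "always"
    compression = models.OutputXsiamCompression.GZIP             # "gzip"

    print(backpressure.value, pq_mode.value, compression.value)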
cribl_control_plane/models/packinfo.py
CHANGED

@@ -8,26 +8,27 @@ from typing_extensions import Annotated, NotRequired, TypedDict


 class PackInfoTagsTypedDict(TypedDict):
-    data_type: List[str]
-    technology: List[str]
+    data_type: NotRequired[List[str]]
     domain: NotRequired[List[str]]
     streamtags: NotRequired[List[str]]
+    technology: NotRequired[List[str]]


 class PackInfoTags(BaseModel):
-    data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
-
-    technology: List[str]
+    data_type: Annotated[Optional[List[str]], pydantic.Field(alias="dataType")] = None

     domain: Optional[List[str]] = None

     streamtags: Optional[List[str]] = None

+    technology: Optional[List[str]] = None
+

 class PackInfoTypedDict(TypedDict):
     id: str
     source: str
     author: NotRequired[str]
+    dependencies: NotRequired[Dict[str, str]]
     description: NotRequired[str]
     display_name: NotRequired[str]
     exports: NotRequired[List[str]]
@@ -48,6 +49,8 @@ class PackInfo(BaseModel):

     author: Optional[str] = None

+    dependencies: Optional[Dict[str, str]] = None
+
     description: Optional[str] = None

     display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None
cribl_control_plane/models/packinstallinfo.py
CHANGED

@@ -8,26 +8,27 @@ from typing_extensions import Annotated, NotRequired, TypedDict


 class PackInstallInfoTagsTypedDict(TypedDict):
-    data_type: List[str]
-    technology: List[str]
+    data_type: NotRequired[List[str]]
     domain: NotRequired[List[str]]
     streamtags: NotRequired[List[str]]
+    technology: NotRequired[List[str]]


 class PackInstallInfoTags(BaseModel):
-    data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
-
-    technology: List[str]
+    data_type: Annotated[Optional[List[str]], pydantic.Field(alias="dataType")] = None

     domain: Optional[List[str]] = None

     streamtags: Optional[List[str]] = None

+    technology: Optional[List[str]] = None
+

 class PackInstallInfoTypedDict(TypedDict):
     id: str
     source: str
     author: NotRequired[str]
+    dependencies: NotRequired[Dict[str, str]]
     description: NotRequired[str]
     display_name: NotRequired[str]
     exports: NotRequired[List[str]]
@@ -49,6 +50,8 @@ class PackInstallInfo(BaseModel):

     author: Optional[str] = None

+    dependencies: Optional[Dict[str, str]] = None
+
     description: Optional[str] = None

     display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None
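
In both packinfo.py and packinstallinfo.py the previously required data_type and technology tag lists become optional, and a dependencies map (pack ID to version) is added. A minimal sketch of what the relaxed models now accept; the values are illustrative, and the import path assumes the usual models re-export:

    from cribl_control_plane import models

    # Tags can now be built without data_type/technology:
    tags = models.PackInfoTags(domain=["security"], streamtags=["demo"])

    # PackInfo (and PackInstallInfo) gain an optional dependencies map:
    info = models.PackInfo(
        id="HelloPacks",
        source="file:/opt/cribl/default/HelloPacks",  # illustrative source string
        dependencies={"another-pack": "1.0.0"},
    )
    print(info)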
cribl_control_plane/models/pipeline.py
CHANGED

@@ -26,7 +26,7 @@ class PipelineGroups(BaseModel):
     r"""Whether this group is disabled"""


-class ConfTypedDict(TypedDict):
+class PipelineConfTypedDict(TypedDict):
     async_func_timeout: NotRequired[int]
     r"""Time (in ms) to wait for an async function to complete processing of a data item"""
     output: NotRequired[str]
@@ -39,7 +39,7 @@ class ConfTypedDict(TypedDict):
     groups: NotRequired[Dict[str, PipelineGroupsTypedDict]]


-class Conf(BaseModel):
+class PipelineConf(BaseModel):
     async_func_timeout: Annotated[
         Optional[int], pydantic.Field(alias="asyncFuncTimeout")
     ] = None
@@ -61,10 +61,10 @@ class Conf(BaseModel):

 class PipelineTypedDict(TypedDict):
     id: str
-    conf: ConfTypedDict
+    conf: PipelineConfTypedDict


 class Pipeline(BaseModel):
     id: str

-    conf: Conf
+    conf: PipelineConf
cribl_control_plane/models/routeconf.py
CHANGED

@@ -1,10 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
-from typing import List, Optional
+from typing import Dict, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


@@ -13,7 +12,7 @@ class RouteConfTypedDict(TypedDict):
     id: str
     name: str
     pipeline: str
-    clones: NotRequired[List[RouteCloneConfTypedDict]]
+    clones: NotRequired[List[Dict[str, str]]]
     context: NotRequired[str]
     description: NotRequired[str]
     disabled: NotRequired[bool]
@@ -33,7 +32,7 @@ class RouteConf(BaseModel):

     pipeline: str

-    clones: Optional[List[RouteCloneConf]] = None
+    clones: Optional[List[Dict[str, str]]] = None

     context: Optional[str] = None

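
The clones field on RouteConf changes from a list of RouteCloneConf objects (whose model file is renamed to rulesetid.py below) to a plain list of string-to-string maps. A minimal sketch, assuming the usual models re-export; the key and value in the clone entry are illustrative only:

    from cribl_control_plane import models

    route = models.RouteConf(
        id="route-1",
        name="default",
        pipeline="main",
        # clones is now List[Dict[str, str]] rather than a list of RouteCloneConf models
        clones=[{"someField": "someValue"}],  # illustrative entry
    )
    print(route)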
cribl_control_plane/models/{routecloneconf.py → rulesetid.py}
RENAMED

@@ -5,9 +5,9 @@ from cribl_control_plane.types import BaseModel
 from typing_extensions import TypedDict


-class RouteCloneConfTypedDict(TypedDict):
-
+class RulesetIDTypedDict(TypedDict):
+    id: str


-class RouteCloneConf(BaseModel):
-
+class RulesetID(BaseModel):
+    id: str
cribl_control_plane/models/runnablejobcollection.py
CHANGED

@@ -363,9 +363,13 @@ class RunnableJobCollectionTimeWarning(BaseModel):


 class WhereToCapture(int, Enum, metaclass=utils.OpenEnumMeta):
+    # 1. Before pre-processing Pipeline
     ZERO = 0
+    # 2. Before the Routes
     ONE = 1
+    # 3. Before post-processing Pipeline
     TWO = 2
+    # 4. Before the Destination
     THREE = 3


cribl_control_plane/models/updateadminproductsmappingsbyproductandidop.py
ADDED

@@ -0,0 +1,63 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .mappingruleset import MappingRuleset, MappingRulesetTypedDict
+from .productscore import ProductsCore
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import (
+    FieldMetadata,
+    PathParamMetadata,
+    RequestMetadata,
+    validate_open_enum,
+)
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class UpdateAdminProductsMappingsByProductAndIDRequestTypedDict(TypedDict):
+    product: ProductsCore
+    r"""Name of the Cribl product to update the Mapping Ruleset for"""
+    id_param: str
+    r"""The <code>id</code> of the Mapping Ruleset to update."""
+    mapping_ruleset: MappingRulesetTypedDict
+    r"""MappingRuleset object"""
+
+
+class UpdateAdminProductsMappingsByProductAndIDRequest(BaseModel):
+    product: Annotated[
+        Annotated[ProductsCore, PlainValidator(validate_open_enum(False))],
+        FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
+    ]
+    r"""Name of the Cribl product to update the Mapping Ruleset for"""
+
+    id_param: Annotated[
+        str,
+        pydantic.Field(alias="id"),
+        FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
+    ]
+    r"""The <code>id</code> of the Mapping Ruleset to update."""
+
+    mapping_ruleset: Annotated[
+        MappingRuleset,
+        FieldMetadata(request=RequestMetadata(media_type="application/json")),
+    ]
+    r"""MappingRuleset object"""
+
+
+class UpdateAdminProductsMappingsByProductAndIDResponseTypedDict(TypedDict):
+    r"""A list containing the updated Mapping Ruleset objects"""
+
+    count: NotRequired[int]
+    r"""number of items present in the items array"""
+    items: NotRequired[List[MappingRulesetTypedDict]]
+
+
+class UpdateAdminProductsMappingsByProductAndIDResponse(BaseModel):
+    r"""A list containing the updated Mapping Ruleset objects"""
+
+    count: Optional[int] = None
+    r"""number of items present in the items array"""
+
+    items: Optional[List[MappingRuleset]] = None
cribl_control_plane/models/updatepacksop.py
ADDED

@@ -0,0 +1,25 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import FieldMetadata, QueryParamMetadata
+import io
+from typing import IO, Union
+from typing_extensions import Annotated, TypedDict
+
+
+class UpdatePacksRequestTypedDict(TypedDict):
+    filename: str
+    r"""Filename of the Pack file to upload."""
+    request_body: Union[bytes, IO[bytes], io.BufferedReader]
+
+
+class UpdatePacksRequest(BaseModel):
+    filename: Annotated[
+        str, FieldMetadata(query=QueryParamMetadata(style="form", explode=True))
+    ]
+    r"""Filename of the Pack file to upload."""
+
+    request_body: Annotated[
+        Union[bytes, IO[bytes], io.BufferedReader], FieldMetadata(request=True)
+    ]
cribl_control_plane/models/uploadpackresponse.py
ADDED

@@ -0,0 +1,13 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing_extensions import TypedDict
+
+
+class UploadPackResponseTypedDict(TypedDict):
+    source: str
+
+
+class UploadPackResponse(BaseModel):
+    source: str
cribl_control_plane/packs.py
CHANGED
@@ -6,7 +6,8 @@ from cribl_control_plane._hooks import HookContext
 from cribl_control_plane.types import BaseModel, OptionalNullable, UNSET
 from cribl_control_plane.utils import get_security_from_env
 from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
-
+import io
+from typing import Any, IO, Mapping, Optional, Union, cast


 class Packs(BaseSDK):
@@ -23,9 +24,9 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreatePacksResponse:
-        r"""
+        r"""Install a Pack

-
+        Install a Pack.<br><br>To install an uploaded Pack, provide the <code>source</code> value from the <code>PUT /packs</code> response as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a URL, provide the direct URL location of the <code>.crbl</code> file for the Pack as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a Git repository, provide <code>git+<repo-url></code> as the <code>source</code> parameter in the request body.<br><br>If you do not include the <code>source</code> parameter in the request body, an empty Pack is created.

         :param request: The request object to send.
         :param retries: Override the default retry configuration for this method
@@ -115,9 +116,9 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreatePacksResponse:
-        r"""
+        r"""Install a Pack

-
+        Install a Pack.<br><br>To install an uploaded Pack, provide the <code>source</code> value from the <code>PUT /packs</code> response as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a URL, provide the direct URL location of the <code>.crbl</code> file for the Pack as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a Git repository, provide <code>git+<repo-url></code> as the <code>source</code> parameter in the request body.<br><br>If you do not include the <code>source</code> parameter in the request body, an empty Pack is created.

         :param request: The request object to send.
         :param retries: Override the default retry configuration for this method
@@ -370,6 +371,200 @@

         raise errors.APIError("Unexpected response received", http_res)

+    def upload(
+        self,
+        *,
+        filename: str,
+        request_body: Union[bytes, IO[bytes], io.BufferedReader],
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UploadPackResponse:
+        r"""Upload a Pack file
+
+        Upload a Pack file. Returns the <code>source</code> ID needed to install the Pack with <code>POST /packs source</code>, which you must call separately.
+
+        :param filename: Filename of the Pack file to upload.
+        :param request_body:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.UpdatePacksRequest(
+            filename=filename,
+            request_body=request_body,
+        )
+
+        req = self._build_request(
+            method="PUT",
+            path="/packs",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.request_body,
+                False,
+                False,
+                "raw",
+                Union[bytes, IO[bytes], io.BufferedReader],
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updatePacks",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(models.UploadPackResponse, http_res)
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def upload_async(
+        self,
+        *,
+        filename: str,
+        request_body: Union[bytes, IO[bytes], io.BufferedReader],
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UploadPackResponse:
+        r"""Upload a Pack file
+
+        Upload a Pack file. Returns the <code>source</code> ID needed to install the Pack with <code>POST /packs source</code>, which you must call separately.
+
+        :param filename: Filename of the Pack file to upload.
+        :param request_body:
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.UpdatePacksRequest(
+            filename=filename,
+            request_body=request_body,
+        )
+
+        req = self._build_request_async(
+            method="PUT",
+            path="/packs",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=True,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            get_serialized_body=lambda: utils.serialize_request_body(
+                request.request_body,
+                False,
+                False,
+                "raw",
+                Union[bytes, IO[bytes], io.BufferedReader],
+            ),
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updatePacks",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(models.UploadPackResponse, http_res)
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
     def delete(
         self,
         *,
@@ -733,7 +928,7 @@ class Packs(BaseSDK):
     ) -> models.UpdatePacksByIDResponse:
         r"""Upgrade a Pack

-        Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.
+        Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.

         :param id: The <code>id</code> of the Pack to upgrade.
         :param source:
@@ -841,7 +1036,7 @@ class Packs(BaseSDK):
    ) -> models.UpdatePacksByIDResponse:
         r"""Upgrade a Pack

-        Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.
+        Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.

         :param id: The <code>id</code> of the Pack to upgrade.
         :param source:
cribl_control_plane/pipelines.py
CHANGED
@@ -176,7 +176,7 @@ class Pipelines(BaseSDK):
         self,
         *,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -205,7 +205,7 @@ class Pipelines(BaseSDK):

         request = models.Pipeline(
             id=id,
-            conf=utils.get_pydantic_model(conf, models.Conf),
+            conf=utils.get_pydantic_model(conf, models.PipelineConf),
         )

         req = self._build_request(
@@ -269,7 +269,7 @@ class Pipelines(BaseSDK):
         self,
         *,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -298,7 +298,7 @@ class Pipelines(BaseSDK):

         request = models.Pipeline(
             id=id,
-            conf=utils.get_pydantic_model(conf, models.Conf),
+            conf=utils.get_pydantic_model(conf, models.PipelineConf),
         )

         req = self._build_request_async(
@@ -537,7 +537,7 @@ class Pipelines(BaseSDK):
         *,
         id_param: str,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -569,7 +569,7 @@ class Pipelines(BaseSDK):
             id_param=id_param,
             pipeline=models.Pipeline(
                 id=id,
-                conf=utils.get_pydantic_model(conf, models.Conf),
+                conf=utils.get_pydantic_model(conf, models.PipelineConf),
             ),
         )

@@ -635,7 +635,7 @@ class Pipelines(BaseSDK):
         *,
         id_param: str,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -667,7 +667,7 @@ class Pipelines(BaseSDK):
             id_param=id_param,
             pipeline=models.Pipeline(
                 id=id,
-                conf=utils.get_pydantic_model(conf, models.Conf),
+                conf=utils.get_pydantic_model(conf, models.PipelineConf),
             ),
         )

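
These hunks rename the pipeline conf models from Conf/ConfTypedDict to PipelineConf/PipelineConfTypedDict; callers pass either form through the same Union parameter. A minimal sketch, reusing the assumed client from the Packs example above; the method name pipelines.create is an assumption, while the field names async_func_timeout and output come from the diff:

    from cribl_control_plane import models

    # The TypedDict form (keys match the diff above); models.PipelineConf is the
    # equivalent pydantic model accepted by the same Union parameter.
    conf: models.PipelineConfTypedDict = {
        "async_func_timeout": 1000,  # ms to wait for an async function to finish
        "output": "default",
    }

    # Assumed method name; the diff only shows the keyword-only (id, conf) signature.
    pipeline = client.pipelines.create(id="my-pipeline", conf=conf)
    print(pipeline)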