cribl-control-plane 0.0.23__py3-none-any.whl → 0.0.25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/auth_sdk.py +4 -4
- cribl_control_plane/{distributed.py → deployments.py} +3 -5
- cribl_control_plane/destinations.py +36 -36
- cribl_control_plane/groups_sdk.py +222 -32
- cribl_control_plane/{health.py → healthinfo.py} +5 -7
- cribl_control_plane/{lake.py → lakedatasets.py} +21 -23
- cribl_control_plane/models/__init__.py +18 -0
- cribl_control_plane/models/createpipelineop.py +2 -2
- cribl_control_plane/models/input.py +4 -4
- cribl_control_plane/models/inputconfluentcloud.py +14 -0
- cribl_control_plane/models/inputgooglepubsub.py +14 -7
- cribl_control_plane/models/inputgrafana.py +14 -0
- cribl_control_plane/models/inputkafka.py +14 -0
- cribl_control_plane/models/inputloki.py +7 -0
- cribl_control_plane/models/inputmsk.py +14 -0
- cribl_control_plane/models/output.py +14 -14
- cribl_control_plane/models/outputconfluentcloud.py +14 -0
- cribl_control_plane/models/outputdls3.py +2 -2
- cribl_control_plane/models/outputgooglecloudstorage.py +2 -2
- cribl_control_plane/models/outputgrafanacloud.py +14 -0
- cribl_control_plane/models/outputkafka.py +14 -0
- cribl_control_plane/models/outputloki.py +14 -0
- cribl_control_plane/models/outputmsk.py +14 -0
- cribl_control_plane/models/outputs3.py +2 -2
- cribl_control_plane/models/updatepipelinebyidop.py +2 -2
- cribl_control_plane/models/updateroutesbyidop.py +2 -2
- cribl_control_plane/nodes.py +379 -0
- cribl_control_plane/packs.py +16 -16
- cribl_control_plane/pipelines.py +30 -30
- cribl_control_plane/routes_sdk.py +10 -10
- cribl_control_plane/sdk.py +15 -19
- cribl_control_plane/sources.py +28 -28
- cribl_control_plane/versioning.py +54 -54
- cribl_control_plane/workers_sdk.py +2 -370
- {cribl_control_plane-0.0.23.dist-info → cribl_control_plane-0.0.25.dist-info}/METADATA +76 -76
- {cribl_control_plane-0.0.23.dist-info → cribl_control_plane-0.0.25.dist-info}/RECORD +38 -38
- cribl_control_plane/teams.py +0 -203
- {cribl_control_plane-0.0.23.dist-info → cribl_control_plane-0.0.25.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputkafka.py
CHANGED

@@ -37,6 +37,13 @@ class OutputKafkaCompression(str, Enum):
     LZ4 = "lz4"


+class OutputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -137,6 +144,8 @@ class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -160,6 +169,11 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Optional[OutputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
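The new setting is exposed as the `schemaType` wire-format key and as the `schema_type` field on the generated models. A minimal usage sketch, assuming the classes above are re-exported from `cribl_control_plane.models` and accept population by Python field name, as other Speakeasy-generated models in this SDK do (the MSK output below gains an identical `OutputMskSchemaType` field):

    # Hypothetical sketch: pick the new schema registry encoding on a Kafka
    # destination. Class, field, and enum names come from the diff above; the
    # re-export from cribl_control_plane.models is an assumption.
    from cribl_control_plane import models

    registry = models.OutputKafkaKafkaSchemaRegistryAuthentication(
        schema_registry_url="http://localhost:8081",
        schema_type=models.OutputKafkaSchemaType.JSON,  # new in 0.0.25; defaults to AVRO
    )

    # Dumping by alias should emit the camelCase key used on the wire.
    assert registry.model_dump(by_alias=True)["schemaType"] == "json"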
cribl_control_plane/models/outputloki.py
CHANGED

@@ -198,6 +198,10 @@ class OutputLokiTypedDict(TypedDict):
     timeout_retry_settings: NotRequired[OutputLokiTimeoutRetrySettingsTypedDict]
     response_honor_retry_after_header: NotRequired[bool]
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+    enable_dynamic_headers: NotRequired[bool]
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     on_backpressure: NotRequired[OutputLokiBackpressureBehavior]
     r"""How to handle events when all receivers are exerting backpressure"""
     total_memory_limit_kb: NotRequired[float]

@@ -335,6 +339,16 @@ class OutputLoki(BaseModel):
     ] = False
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""

+    enable_dynamic_headers: Annotated[
+        Optional[bool], pydantic.Field(alias="enableDynamicHeaders")
+    ] = False
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     on_backpressure: Annotated[
         Optional[OutputLokiBackpressureBehavior], pydantic.Field(alias="onBackpressure")
     ] = OutputLokiBackpressureBehavior.BLOCK
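Both additions are boolean flags that default to False. A minimal sketch of the JSON fragment a caller would merge into a Loki destination's configuration to enable them, using the wire-format aliases from the diff above (the destination's other required settings are elided):

    # Hypothetical config fragment; keys are the aliases defined above.
    loki_patch = {
        "enableDynamicHeaders": True,    # send per-event __headers, batching events per header set
        "sendStructuredMetadata": True,  # forward __structuredMetadata key-value pairs with each log
    }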
cribl_control_plane/models/outputmsk.py
CHANGED

@@ -37,6 +37,13 @@ class OutputMskCompression(str, Enum):
     LZ4 = "lz4"


+class OutputMskSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -137,6 +144,8 @@ class OutputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputMskSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -160,6 +169,11 @@ class OutputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Optional[OutputMskSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputMskSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/outputs3.py
CHANGED

@@ -161,7 +161,7 @@ class OutputS3TypedDict(TypedDict):
     add_id_to_stage_path: NotRequired[bool]
     r"""Add the Output ID value to staging location"""
     dest_path: NotRequired[str]
-    r"""Prefix to
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     object_acl: NotRequired[OutputS3ObjectACL]
     r"""Object ACL to assign to uploaded objects"""
     storage_class: NotRequired[OutputS3StorageClass]

@@ -326,7 +326,7 @@ class OutputS3(BaseModel):
     r"""Add the Output ID value to staging location"""

     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""

     object_acl: Annotated[
         Optional[OutputS3ObjectACL], pydantic.Field(alias="objectACL")
cribl_control_plane/models/updatepipelinebyidop.py
CHANGED

@@ -31,7 +31,7 @@ class UpdatePipelineByIDRequest(BaseModel):


 class UpdatePipelineByIDResponseTypedDict(TypedDict):
-    r"""a list of
+    r"""a list of any objects"""

     count: NotRequired[int]
     r"""number of items present in the items array"""

@@ -39,7 +39,7 @@ class UpdatePipelineByIDResponseTypedDict(TypedDict):


 class UpdatePipelineByIDResponse(BaseModel):
-    r"""a list of
+    r"""a list of any objects"""

     count: Optional[int] = None
     r"""number of items present in the items array"""
cribl_control_plane/models/updateroutesbyidop.py
CHANGED

@@ -13,7 +13,7 @@ class UpdateRoutesByIDRequestTypedDict(TypedDict):
     id_param: str
     r"""Unique ID to PATCH"""
     routes: RoutesTypedDict
-    r"""Routes object
+    r"""Routes object"""


 class UpdateRoutesByIDRequest(BaseModel):

@@ -27,7 +27,7 @@ class UpdateRoutesByIDRequest(BaseModel):
     routes: Annotated[
         Routes, FieldMetadata(request=RequestMetadata(media_type="application/json"))
     ]
-    r"""Routes object
+    r"""Routes object"""


 class UpdateRoutesByIDResponseTypedDict(TypedDict):
cribl_control_plane/nodes.py
ADDED

@@ -0,0 +1,379 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from .basesdk import BaseSDK
+from cribl_control_plane import errors, models, utils
+from cribl_control_plane._hooks import HookContext
+from cribl_control_plane.types import OptionalNullable, UNSET
+from cribl_control_plane.utils import get_security_from_env
+from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
+from typing import Any, Mapping, Optional
+
+
+class Nodes(BaseSDK):
+    def list(
+        self,
+        *,
+        filter_exp: Optional[str] = None,
+        sort: Optional[str] = None,
+        sort_exp: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        filter_: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetWorkersResponse:
+        r"""Retrieve detailed metadata for Worker and Edge Nodes
+
+        get worker and edge nodes
+
+        :param filter_exp: Filter expression evaluated against nodes
+        :param sort: Sorting object (JSON stringified) expression evaluated against nodes
+        :param sort_exp: Sorting expression evaluated against nodes
+        :param limit: Maximum number of nodes to return
+        :param offset: Pagination offset
+        :param filter_: Filter object (JSON stringified) to select nodes
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetWorkersRequest(
+            filter_exp=filter_exp,
+            sort=sort,
+            sort_exp=sort_exp,
+            limit=limit,
+            offset=offset,
+            filter_=filter_,
+        )
+
+        req = self._build_request(
+            method="GET",
+            path="/master/workers",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getWorkers",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(models.GetWorkersResponse, http_res)
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def list_async(
+        self,
+        *,
+        filter_exp: Optional[str] = None,
+        sort: Optional[str] = None,
+        sort_exp: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        filter_: Optional[str] = None,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.GetWorkersResponse:
+        r"""Retrieve detailed metadata for Worker and Edge Nodes
+
+        get worker and edge nodes
+
+        :param filter_exp: Filter expression evaluated against nodes
+        :param sort: Sorting object (JSON stringified) expression evaluated against nodes
+        :param sort_exp: Sorting expression evaluated against nodes
+        :param limit: Maximum number of nodes to return
+        :param offset: Pagination offset
+        :param filter_: Filter object (JSON stringified) to select nodes
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+
+        request = models.GetWorkersRequest(
+            filter_exp=filter_exp,
+            sort=sort,
+            sort_exp=sort_exp,
+            limit=limit,
+            offset=offset,
+            filter_=filter_,
+        )
+
+        req = self._build_request_async(
+            method="GET",
+            path="/master/workers",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=request,
+            request_body_required=False,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="getWorkers",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(models.GetWorkersResponse, http_res)
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    def restart(
+        self,
+        *,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UpdateWorkersRestartResponse:
+        r"""Restart Worker and Edge Nodes
+
+        restarts worker nodes
+
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+        req = self._build_request(
+            method="PATCH",
+            path="/master/workers/restart",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=None,
+            request_body_required=False,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = self.do_request(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updateWorkersRestart",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.UpdateWorkersRestartResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = utils.stream_to_text(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
+
+    async def restart_async(
+        self,
+        *,
+        retries: OptionalNullable[utils.RetryConfig] = UNSET,
+        server_url: Optional[str] = None,
+        timeout_ms: Optional[int] = None,
+        http_headers: Optional[Mapping[str, str]] = None,
+    ) -> models.UpdateWorkersRestartResponse:
+        r"""Restart Worker and Edge Nodes
+
+        restarts worker nodes
+
+        :param retries: Override the default retry configuration for this method
+        :param server_url: Override the default server URL for this method
+        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
+        :param http_headers: Additional headers to set or replace on requests.
+        """
+        base_url = None
+        url_variables = None
+        if timeout_ms is None:
+            timeout_ms = self.sdk_configuration.timeout_ms
+
+        if server_url is not None:
+            base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
+        req = self._build_request_async(
+            method="PATCH",
+            path="/master/workers/restart",
+            base_url=base_url,
+            url_variables=url_variables,
+            request=None,
+            request_body_required=False,
+            request_has_path_params=False,
+            request_has_query_params=True,
+            user_agent_header="user-agent",
+            accept_header_value="application/json",
+            http_headers=http_headers,
+            security=self.sdk_configuration.security,
+            timeout_ms=timeout_ms,
+        )
+
+        if retries == UNSET:
+            if self.sdk_configuration.retry_config is not UNSET:
+                retries = self.sdk_configuration.retry_config
+
+        retry_config = None
+        if isinstance(retries, utils.RetryConfig):
+            retry_config = (retries, ["429", "500", "502", "503", "504"])
+
+        http_res = await self.do_request_async(
+            hook_ctx=HookContext(
+                config=self.sdk_configuration,
+                base_url=base_url or "",
+                operation_id="updateWorkersRestart",
+                oauth2_scopes=[],
+                security_source=get_security_from_env(
+                    self.sdk_configuration.security, models.Security
+                ),
+            ),
+            request=req,
+            error_status_codes=["401", "4XX", "500", "5XX"],
+            retry_config=retry_config,
+        )
+
+        response_data: Any = None
+        if utils.match_response(http_res, "200", "application/json"):
+            return unmarshal_json_response(
+                models.UpdateWorkersRestartResponse, http_res
+            )
+        if utils.match_response(http_res, "500", "application/json"):
+            response_data = unmarshal_json_response(errors.ErrorData, http_res)
+            raise errors.Error(response_data, http_res)
+        if utils.match_response(http_res, ["401", "4XX"], "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+        if utils.match_response(http_res, "5XX", "*"):
+            http_res_text = await utils.stream_to_text_async(http_res)
+            raise errors.APIError("API error occurred", http_res, http_res_text)
+
+        raise errors.APIError("Unexpected response received", http_res)
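The new Nodes class wraps GET /master/workers and PATCH /master/workers/restart, which appears to take over the node-listing logic that previously lived in workers_sdk.py (that module shrinks by roughly 368 lines in this release). A minimal usage sketch, assuming the root client class is CriblControlPlane and that it exposes this class as a `nodes` attribute (both assumptions; the method names, parameters, and return models come from the code above):

    # Hypothetical usage of the new Nodes operations. The client class name, the
    # `nodes` attribute, the server URL shape, and the Security fields are
    # assumptions; list()/restart() signatures are taken from nodes.py above.
    from cribl_control_plane import CriblControlPlane, models

    client = CriblControlPlane(
        server_url="https://leader.example.com/api/v1",
        security=models.Security(),  # fill in real credentials here
    )

    workers = client.nodes.list(limit=50, offset=0)  # GET /master/workers
    restarted = client.nodes.restart()               # PATCH /master/workers/restart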
cribl_control_plane/packs.py
CHANGED

@@ -12,7 +12,7 @@ from typing import Any, List, Mapping, Optional, Union
 class Packs(BaseSDK):
     r"""Actions related to Packs"""

-    def
+    def install(
         self,
         *,
         id: str,

@@ -36,7 +36,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreatePacksResponse:
-        r"""Install Pack
+        r"""Install a Pack

         Install Pack

@@ -143,7 +143,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def install_async(
         self,
         *,
         id: str,

@@ -167,7 +167,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreatePacksResponse:
-        r"""Install Pack
+        r"""Install a Pack

         Install Pack

@@ -274,7 +274,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def list(
         self,
         *,
         with_: Optional[str] = None,

@@ -283,7 +283,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.GetPacksResponse:
-        r"""
+        r"""List all Packs

         Get info on packs

@@ -361,7 +361,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def list_async(
         self,
         *,
         with_: Optional[str] = None,

@@ -370,7 +370,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.GetPacksResponse:
-        r"""
+        r"""List all Packs

         Get info on packs

@@ -448,7 +448,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def delete(
         self,
         *,
         id: str,

@@ -457,7 +457,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.DeletePacksByIDResponse:
-        r"""Uninstall Pack
+        r"""Uninstall a Pack

         Uninstall Pack from the system

@@ -535,7 +535,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def delete_async(
         self,
         *,
         id: str,

@@ -544,7 +544,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.DeletePacksByIDResponse:
-        r"""Uninstall Pack
+        r"""Uninstall a Pack

         Uninstall Pack from the system

@@ -622,7 +622,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def update(
         self,
         *,
         id: str,

@@ -634,7 +634,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.UpdatePacksByIDResponse:
-        r"""
+        r"""Update a Pack

         Upgrade Pack

@@ -718,7 +718,7 @@ class Packs(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def update_async(
         self,
         *,
         id: str,

@@ -730,7 +730,7 @@ class Packs(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.UpdatePacksByIDResponse:
-        r"""
+        r"""Update a Pack

         Upgrade Pack

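With these renames the Packs surface now uses conventional verb names: install/install_async, list/list_async, delete/delete_async, and update/update_async. A minimal sketch of calling them, assuming the root client exposes the class as a `packs` attribute and reusing the `client` from the Nodes sketch above (the pack ID is a placeholder, and additional parameters accepted by install and update are omitted):

    # Hypothetical calls against the renamed Packs methods.
    packs = client.packs.list()                          # List all Packs
    installed = client.packs.install(id="example-pack")  # Install a Pack
    updated = client.packs.update(id="example-pack")     # Upgrade a Pack
    removed = client.packs.delete(id="example-pack")     # Uninstall a Pack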