cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.26a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/models/__init__.py +18 -0
- cribl_control_plane/models/input.py +4 -4
- cribl_control_plane/models/inputconfluentcloud.py +14 -0
- cribl_control_plane/models/inputgooglepubsub.py +14 -7
- cribl_control_plane/models/inputgrafana.py +14 -0
- cribl_control_plane/models/inputkafka.py +14 -0
- cribl_control_plane/models/inputloki.py +7 -0
- cribl_control_plane/models/inputmsk.py +14 -0
- cribl_control_plane/models/output.py +14 -14
- cribl_control_plane/models/outputconfluentcloud.py +14 -0
- cribl_control_plane/models/outputdls3.py +2 -2
- cribl_control_plane/models/outputgooglecloudstorage.py +2 -2
- cribl_control_plane/models/outputgrafanacloud.py +14 -0
- cribl_control_plane/models/outputkafka.py +14 -0
- cribl_control_plane/models/outputloki.py +14 -0
- cribl_control_plane/models/outputmsk.py +14 -0
- cribl_control_plane/models/outputs3.py +2 -2
- cribl_control_plane/nodes.py +0 -174
- cribl_control_plane/pipelines.py +20 -20
- cribl_control_plane/sdk.py +6 -2
- cribl_control_plane/versioning.py +4 -4
- cribl_control_plane/workers_sdk.py +187 -0
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26a1.dist-info}/METADATA +10 -7
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26a1.dist-info}/RECORD +26 -25
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26a1.dist-info}/WHEEL +0 -0

cribl_control_plane/models/outputkafka.py CHANGED

@@ -37,6 +37,13 @@ class OutputKafkaCompression(str, Enum):
     LZ4 = "lz4"


+class OutputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -137,6 +144,8 @@ class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -160,6 +169,11 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Optional[OutputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000

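The MSK Destination below receives the identical change, and the Confluent Cloud and Grafana Cloud output models (plus several input models) show the same +14 line count in the file list above. A minimal usage sketch, assuming the new enum and the Schema Registry model are re-exported from cribl_control_plane.models and accept population by Python field name, as these generated models typically do; the registry URL is illustrative:

from cribl_control_plane import models

# Schema Registry settings for a Kafka Destination; fields not set here keep
# their generated defaults.
registry = models.OutputKafkaKafkaSchemaRegistryAuthentication(
    schema_registry_url="https://schema-registry.example.com:8081",  # illustrative value
    schema_type=models.OutputKafkaSchemaType.JSON,  # new in 0.0.26a1; default is AVRO
)

# The new field serializes under its wire alias "schemaType".
payload = registry.model_dump(by_alias=True, mode="json")
assert payload["schemaType"] == "json"
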
cribl_control_plane/models/outputloki.py CHANGED

@@ -198,6 +198,10 @@ class OutputLokiTypedDict(TypedDict):
     timeout_retry_settings: NotRequired[OutputLokiTimeoutRetrySettingsTypedDict]
     response_honor_retry_after_header: NotRequired[bool]
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+    enable_dynamic_headers: NotRequired[bool]
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     on_backpressure: NotRequired[OutputLokiBackpressureBehavior]
     r"""How to handle events when all receivers are exerting backpressure"""
     total_memory_limit_kb: NotRequired[float]
@@ -335,6 +339,16 @@ class OutputLoki(BaseModel):
     ] = False
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""

+    enable_dynamic_headers: Annotated[
+        Optional[bool], pydantic.Field(alias="enableDynamicHeaders")
+    ] = False
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     on_backpressure: Annotated[
         Optional[OutputLokiBackpressureBehavior], pydantic.Field(alias="onBackpressure")
     ] = OutputLokiBackpressureBehavior.BLOCK

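A sketch of the two new Loki toggles in the TypedDict form of the model. The id, type, and url keys are illustrative stand-ins for the Destination's other fields, which this diff does not show:

from cribl_control_plane import models

loki_out: models.OutputLokiTypedDict = {
    "id": "my-loki",                                      # illustrative
    "type": "loki",                                       # illustrative
    "url": "https://loki.example.com/loki/api/v1/push",   # illustrative
    "enable_dynamic_headers": True,    # forward per-event headers from __headers
    "send_structured_metadata": True,  # attach key-value pairs from __structuredMetadata
}
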
cribl_control_plane/models/outputmsk.py CHANGED

@@ -37,6 +37,13 @@ class OutputMskCompression(str, Enum):
     LZ4 = "lz4"


+class OutputMskSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -137,6 +144,8 @@ class OutputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputMskSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -160,6 +169,11 @@ class OutputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Optional[OutputMskSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputMskSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000

cribl_control_plane/models/outputs3.py CHANGED

@@ -161,7 +161,7 @@ class OutputS3TypedDict(TypedDict):
     add_id_to_stage_path: NotRequired[bool]
     r"""Add the Output ID value to staging location"""
     dest_path: NotRequired[str]
-    r"""Prefix to
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     object_acl: NotRequired[OutputS3ObjectACL]
     r"""Object ACL to assign to uploaded objects"""
     storage_class: NotRequired[OutputS3StorageClass]
@@ -326,7 +326,7 @@ class OutputS3(BaseModel):
     r"""Add the Output ID value to staging location"""

     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""

     object_acl: Annotated[
         Optional[OutputS3ObjectACL], pydantic.Field(alias="objectACL")

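The restored docstring clarifies that dest_path is a JavaScript expression evaluated by Cribl at init time, so the quotes or backticks belong inside the Python string. A minimal sketch in the TypedDict form; the bucket value is illustrative and the dict is not a complete S3 Destination config:

from cribl_control_plane import models

s3_out: models.OutputS3TypedDict = {
    "bucket": "my-bucket",                         # illustrative
    "dest_path": "`myKeyPrefix-${C.vars.myVar}`",  # backtick-quoted JS template literal
}
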
cribl_control_plane/nodes.py CHANGED

@@ -10,180 +10,6 @@ from typing import Any, Mapping, Optional


 class Nodes(BaseSDK):
-    def get_count(
-        self,
-        *,
-        filter_exp: Optional[str] = None,
-        retries: OptionalNullable[utils.RetryConfig] = UNSET,
-        server_url: Optional[str] = None,
-        timeout_ms: Optional[int] = None,
-        http_headers: Optional[Mapping[str, str]] = None,
-    ) -> models.GetSummaryWorkersResponse:
-        r"""Retrieve a count of Worker and Edge Nodes
-
-        get worker and edge nodes count
-
-        :param filter_exp: Filter expression evaluated against nodes
-        :param retries: Override the default retry configuration for this method
-        :param server_url: Override the default server URL for this method
-        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
-        :param http_headers: Additional headers to set or replace on requests.
-        """
-        base_url = None
-        url_variables = None
-        if timeout_ms is None:
-            timeout_ms = self.sdk_configuration.timeout_ms
-
-        if server_url is not None:
-            base_url = server_url
-        else:
-            base_url = self._get_url(base_url, url_variables)
-
-        request = models.GetSummaryWorkersRequest(
-            filter_exp=filter_exp,
-        )
-
-        req = self._build_request(
-            method="GET",
-            path="/master/summary/workers",
-            base_url=base_url,
-            url_variables=url_variables,
-            request=request,
-            request_body_required=False,
-            request_has_path_params=False,
-            request_has_query_params=True,
-            user_agent_header="user-agent",
-            accept_header_value="application/json",
-            http_headers=http_headers,
-            security=self.sdk_configuration.security,
-            timeout_ms=timeout_ms,
-        )
-
-        if retries == UNSET:
-            if self.sdk_configuration.retry_config is not UNSET:
-                retries = self.sdk_configuration.retry_config
-
-        retry_config = None
-        if isinstance(retries, utils.RetryConfig):
-            retry_config = (retries, ["429", "500", "502", "503", "504"])
-
-        http_res = self.do_request(
-            hook_ctx=HookContext(
-                config=self.sdk_configuration,
-                base_url=base_url or "",
-                operation_id="getSummaryWorkers",
-                oauth2_scopes=[],
-                security_source=get_security_from_env(
-                    self.sdk_configuration.security, models.Security
-                ),
-            ),
-            request=req,
-            error_status_codes=["401", "4XX", "500", "5XX"],
-            retry_config=retry_config,
-        )
-
-        response_data: Any = None
-        if utils.match_response(http_res, "200", "application/json"):
-            return unmarshal_json_response(models.GetSummaryWorkersResponse, http_res)
-        if utils.match_response(http_res, "500", "application/json"):
-            response_data = unmarshal_json_response(errors.ErrorData, http_res)
-            raise errors.Error(response_data, http_res)
-        if utils.match_response(http_res, ["401", "4XX"], "*"):
-            http_res_text = utils.stream_to_text(http_res)
-            raise errors.APIError("API error occurred", http_res, http_res_text)
-        if utils.match_response(http_res, "5XX", "*"):
-            http_res_text = utils.stream_to_text(http_res)
-            raise errors.APIError("API error occurred", http_res, http_res_text)
-
-        raise errors.APIError("Unexpected response received", http_res)
-
-    async def get_count_async(
-        self,
-        *,
-        filter_exp: Optional[str] = None,
-        retries: OptionalNullable[utils.RetryConfig] = UNSET,
-        server_url: Optional[str] = None,
-        timeout_ms: Optional[int] = None,
-        http_headers: Optional[Mapping[str, str]] = None,
-    ) -> models.GetSummaryWorkersResponse:
-        r"""Retrieve a count of Worker and Edge Nodes
-
-        get worker and edge nodes count
-
-        :param filter_exp: Filter expression evaluated against nodes
-        :param retries: Override the default retry configuration for this method
-        :param server_url: Override the default server URL for this method
-        :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
-        :param http_headers: Additional headers to set or replace on requests.
-        """
-        base_url = None
-        url_variables = None
-        if timeout_ms is None:
-            timeout_ms = self.sdk_configuration.timeout_ms
-
-        if server_url is not None:
-            base_url = server_url
-        else:
-            base_url = self._get_url(base_url, url_variables)
-
-        request = models.GetSummaryWorkersRequest(
-            filter_exp=filter_exp,
-        )
-
-        req = self._build_request_async(
-            method="GET",
-            path="/master/summary/workers",
-            base_url=base_url,
-            url_variables=url_variables,
-            request=request,
-            request_body_required=False,
-            request_has_path_params=False,
-            request_has_query_params=True,
-            user_agent_header="user-agent",
-            accept_header_value="application/json",
-            http_headers=http_headers,
-            security=self.sdk_configuration.security,
-            timeout_ms=timeout_ms,
-        )
-
-        if retries == UNSET:
-            if self.sdk_configuration.retry_config is not UNSET:
-                retries = self.sdk_configuration.retry_config
-
-        retry_config = None
-        if isinstance(retries, utils.RetryConfig):
-            retry_config = (retries, ["429", "500", "502", "503", "504"])
-
-        http_res = await self.do_request_async(
-            hook_ctx=HookContext(
-                config=self.sdk_configuration,
-                base_url=base_url or "",
-                operation_id="getSummaryWorkers",
-                oauth2_scopes=[],
-                security_source=get_security_from_env(
-                    self.sdk_configuration.security, models.Security
-                ),
-            ),
-            request=req,
-            error_status_codes=["401", "4XX", "500", "5XX"],
-            retry_config=retry_config,
-        )
-
-        response_data: Any = None
-        if utils.match_response(http_res, "200", "application/json"):
-            return unmarshal_json_response(models.GetSummaryWorkersResponse, http_res)
-        if utils.match_response(http_res, "500", "application/json"):
-            response_data = unmarshal_json_response(errors.ErrorData, http_res)
-            raise errors.Error(response_data, http_res)
-        if utils.match_response(http_res, ["401", "4XX"], "*"):
-            http_res_text = await utils.stream_to_text_async(http_res)
-            raise errors.APIError("API error occurred", http_res, http_res_text)
-        if utils.match_response(http_res, "5XX", "*"):
-            http_res_text = await utils.stream_to_text_async(http_res)
-            raise errors.APIError("API error occurred", http_res, http_res_text)
-
-        raise errors.APIError("Unexpected response received", http_res)
-
     def list(
         self,
         *,

cribl_control_plane/pipelines.py CHANGED

@@ -12,7 +12,7 @@ from typing import Any, Mapping, Optional, Union
 class Pipelines(BaseSDK):
     r"""Actions related to Pipelines"""

-    def
+    def list(
         self,
         *,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
@@ -20,9 +20,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.ListPipelineResponse:
-        r"""
+        r"""List all Pipelines

-
+        List all Pipelines

         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
@@ -92,7 +92,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def list_async(
         self,
         *,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
@@ -100,9 +100,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.ListPipelineResponse:
-        r"""
+        r"""List all Pipelines

-
+        List all Pipelines

         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
@@ -358,7 +358,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def get(
         self,
         *,
         id: str,
@@ -367,9 +367,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.GetPipelineByIDResponse:
-        r"""
+        r"""Retrieve a Pipeline

-
+        Retrieve a Pipeline

         :param id: Unique ID to GET
         :param retries: Override the default retry configuration for this method
@@ -445,7 +445,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def get_async(
         self,
         *,
         id: str,
@@ -454,9 +454,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.GetPipelineByIDResponse:
-        r"""
+        r"""Retrieve a Pipeline

-
+        Retrieve a Pipeline

         :param id: Unique ID to GET
         :param retries: Override the default retry configuration for this method
@@ -532,7 +532,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def update(
         self,
         *,
         id_param: str,
@@ -630,7 +630,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def update_async(
         self,
         *,
         id_param: str,
@@ -728,7 +728,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    def
+    def delete(
         self,
         *,
         id: str,
@@ -737,9 +737,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.DeletePipelineByIDResponse:
-        r"""Delete Pipeline
+        r"""Delete a Pipeline

-        Delete Pipeline
+        Delete a Pipeline

         :param id: Unique ID to DELETE
         :param retries: Override the default retry configuration for this method
@@ -815,7 +815,7 @@ class Pipelines(BaseSDK):

         raise errors.APIError("Unexpected response received", http_res)

-    async def
+    async def delete_async(
         self,
         *,
         id: str,
@@ -824,9 +824,9 @@ class Pipelines(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.DeletePipelineByIDResponse:
-        r"""Delete Pipeline
+        r"""Delete a Pipeline

-        Delete Pipeline
+        Delete a Pipeline

         :param id: Unique ID to DELETE
         :param retries: Override the default retry configuration for this method

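With the method names shown above, basic Pipelines usage in 0.0.26a1 looks like the following sketch; client is assumed to be an already configured CriblControlPlane instance, and the pipeline ID is illustrative:

# Synchronous variants; each has an *_async counterpart as shown in the diff.
pipelines = client.pipelines.list()
pipeline = client.pipelines.get(id="my-pipeline")
client.pipelines.delete(id="my-pipeline")
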
cribl_control_plane/sdk.py CHANGED

@@ -26,6 +26,7 @@ if TYPE_CHECKING:
     from cribl_control_plane.routes_sdk import RoutesSDK
     from cribl_control_plane.sources import Sources
     from cribl_control_plane.versioning import Versioning
+    from cribl_control_plane.workers_sdk import WorkersSDK


 class CriblControlPlane(BaseSDK):
@@ -40,13 +41,15 @@ class CriblControlPlane(BaseSDK):
     r"""Actions related to Routes"""
     auth: "AuthSDK"
     r"""Actions related to authentication. Do not use the /auth endpoints in Cribl.Cloud deployments. Instead, follow the instructions at https://docs.cribl.io/stream/api-tutorials/#criblcloud to authenticate for Cribl.Cloud."""
-    nodes: "Nodes"
     deployments: "Deployments"
     health_info: "HealthInfo"
     packs: "Packs"
     r"""Actions related to Packs"""
     versioning: "Versioning"
     r"""Actions related to Versioning"""
+    workers: "WorkersSDK"
+    r"""Actions related to Workers"""
+    nodes: "Nodes"
     groups: "GroupsSDK"
     r"""Actions related to Groups"""
     _sub_sdk_map = {
@@ -56,11 +59,12 @@ class CriblControlPlane(BaseSDK):
         "pipelines": ("cribl_control_plane.pipelines", "Pipelines"),
         "routes": ("cribl_control_plane.routes_sdk", "RoutesSDK"),
         "auth": ("cribl_control_plane.auth_sdk", "AuthSDK"),
-        "nodes": ("cribl_control_plane.nodes", "Nodes"),
         "deployments": ("cribl_control_plane.deployments", "Deployments"),
         "health_info": ("cribl_control_plane.healthinfo", "HealthInfo"),
         "packs": ("cribl_control_plane.packs", "Packs"),
         "versioning": ("cribl_control_plane.versioning", "Versioning"),
+        "workers": ("cribl_control_plane.workers_sdk", "WorkersSDK"),
+        "nodes": ("cribl_control_plane.nodes", "Nodes"),
         "groups": ("cribl_control_plane.groups_sdk", "GroupsSDK"),
     }

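Nodes loses get_count/get_count_async (the getSummaryWorkers operation) in this release, while a new workers sub-SDK is registered here and implemented in workers_sdk.py (+187 lines). A migration sketch under the assumption, not confirmed by this diff, that the operation resurfaces on client.workers with the same method name and signature; the server URL and filter expression are illustrative, and authentication is omitted:

from cribl_control_plane import CriblControlPlane

with CriblControlPlane(server_url="https://leader.example.com/api/v1") as client:
    # 0.0.24: summary = client.nodes.get_count(filter_exp=...)
    summary = client.workers.get_count(filter_exp="id.startsWith('worker')")  # assumed new location
    print(summary)
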
cribl_control_plane/versioning.py CHANGED

@@ -2143,9 +2143,9 @@ class Versioning(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreateVersionUndoResponse:
-        r"""
+        r"""Discard uncommitted (staged) changes

-
+        Discards all uncommitted (staged) configuration changes, resetting the working directory to the last committed state.

         :param group: Group ID
         :param retries: Override the default retry configuration for this method
@@ -2230,9 +2230,9 @@ class Versioning(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> models.CreateVersionUndoResponse:
-        r"""
+        r"""Discard uncommitted (staged) changes

-
+        Discards all uncommitted (staged) configuration changes, resetting the working directory to the last committed state.

         :param group: Group ID
         :param retries: Override the default retry configuration for this method