cribl-control-plane: cribl_control_plane-0.0.21a2-py3-none-any.whl → cribl_control_plane-0.0.23-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane has been flagged as potentially problematic; consult the package registry's advisory page for details.

Files changed (27):
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/destinations.py +10 -10
  3. cribl_control_plane/groups_sdk.py +572 -60
  4. cribl_control_plane/models/__init__.py +153 -5394
  5. cribl_control_plane/models/createinputop.py +2 -18216
  6. cribl_control_plane/models/createoutputop.py +2 -18415
  7. cribl_control_plane/models/deletegroupsbyidop.py +37 -0
  8. cribl_control_plane/models/input.py +6 -6
  9. cribl_control_plane/models/inputedgeprometheus.py +7 -10
  10. cribl_control_plane/models/{inputgrafana_union.py → inputgrafana.py} +4 -4
  11. cribl_control_plane/models/{inputsyslog_union.py → inputsyslog.py} +4 -4
  12. cribl_control_plane/models/inputwef.py +4 -4
  13. cribl_control_plane/models/outputgooglepubsub.py +3 -3
  14. cribl_control_plane/models/outputsplunklb.py +8 -8
  15. cribl_control_plane/models/routes.py +0 -24
  16. cribl_control_plane/models/updategroupsbyidop.py +48 -0
  17. cribl_control_plane/models/updateinputbyidop.py +2 -2
  18. cribl_control_plane/models/updateoutputbyidop.py +2 -2
  19. cribl_control_plane/models/updateroutesbyidop.py +3 -4
  20. cribl_control_plane/packs.py +0 -204
  21. cribl_control_plane/routes_sdk.py +8 -12
  22. cribl_control_plane/sources.py +10 -10
  23. {cribl_control_plane-0.0.21a2.dist-info → cribl_control_plane-0.0.23.dist-info}/METADATA +209 -44
  24. {cribl_control_plane-0.0.21a2.dist-info → cribl_control_plane-0.0.23.dist-info}/RECORD +25 -25
  25. cribl_control_plane/models/routesroute_input.py +0 -67
  26. cribl_control_plane/models/updatepacksop.py +0 -98
  27. {cribl_control_plane-0.0.21a2.dist-info → cribl_control_plane-0.0.23.dist-info}/WHEEL +0 -0
@@ -0,0 +1,37 @@
1
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
+
3
+ from __future__ import annotations
4
+ from .configgroup import ConfigGroup, ConfigGroupTypedDict
5
+ from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import FieldMetadata, PathParamMetadata
7
+ from typing import List, Optional
8
+ from typing_extensions import Annotated, NotRequired, TypedDict
9
+
10
+
11
+ class DeleteGroupsByIDRequestTypedDict(TypedDict):
12
+ id: str
13
+ r"""Group ID"""
14
+
15
+
16
+ class DeleteGroupsByIDRequest(BaseModel):
17
+ id: Annotated[
18
+ str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
19
+ ]
20
+ r"""Group ID"""
21
+
22
+
23
+ class DeleteGroupsByIDResponseTypedDict(TypedDict):
24
+ r"""a list of ConfigGroup objects"""
25
+
26
+ count: NotRequired[int]
27
+ r"""number of items present in the items array"""
28
+ items: NotRequired[List[ConfigGroupTypedDict]]
29
+
30
+
31
+ class DeleteGroupsByIDResponse(BaseModel):
32
+ r"""a list of ConfigGroup objects"""
33
+
34
+ count: Optional[int] = None
35
+ r"""number of items present in the items array"""
36
+
37
+ items: Optional[List[ConfigGroup]] = None
@@ -20,7 +20,7 @@ from .inputexec import InputExec, InputExecTypedDict
20
20
  from .inputfile import InputFile, InputFileTypedDict
21
21
  from .inputfirehose import InputFirehose, InputFirehoseTypedDict
22
22
  from .inputgooglepubsub import InputGooglePubsub, InputGooglePubsubTypedDict
23
- from .inputgrafana_union import InputGrafanaUnion, InputGrafanaUnionTypedDict
23
+ from .inputgrafana import InputGrafana, InputGrafanaTypedDict
24
24
  from .inputhttp import InputHTTP, InputHTTPTypedDict
25
25
  from .inputhttpraw import InputHTTPRaw, InputHTTPRawTypedDict
26
26
  from .inputjournalfiles import InputJournalFiles, InputJournalFilesTypedDict
@@ -55,7 +55,7 @@ from .inputsplunk import InputSplunk, InputSplunkTypedDict
55
55
  from .inputsplunkhec import InputSplunkHec, InputSplunkHecTypedDict
56
56
  from .inputsplunksearch import InputSplunkSearch, InputSplunkSearchTypedDict
57
57
  from .inputsqs import InputSqs, InputSqsTypedDict
58
- from .inputsyslog_union import InputSyslogUnion, InputSyslogUnionTypedDict
58
+ from .inputsyslog import InputSyslog, InputSyslogTypedDict
59
59
  from .inputsystemmetrics import InputSystemMetrics, InputSystemMetricsTypedDict
60
60
  from .inputsystemstate import InputSystemState, InputSystemStateTypedDict
61
61
  from .inputtcp import InputTCP, InputTCPTypedDict
@@ -128,8 +128,8 @@ InputTypedDict = TypeAliasType(
128
128
  InputS3InventoryTypedDict,
129
129
  InputMskTypedDict,
130
130
  InputSplunkSearchTypedDict,
131
- InputSyslogUnionTypedDict,
132
- InputGrafanaUnionTypedDict,
131
+ InputSyslogTypedDict,
132
+ InputGrafanaTypedDict,
133
133
  ],
134
134
  )
135
135
 
@@ -193,7 +193,7 @@ Input = TypeAliasType(
193
193
  InputS3Inventory,
194
194
  InputMsk,
195
195
  InputSplunkSearch,
196
- InputSyslogUnion,
197
- InputGrafanaUnion,
196
+ InputSyslog,
197
+ InputGrafana,
198
198
  ],
199
199
  )
@@ -155,7 +155,7 @@ class InputEdgePrometheusAuthTypeAuthenticationMethod(str, Enum):
155
155
  KUBERNETES = "kubernetes"
156
156
 
157
157
 
158
- class InputEdgePrometheusTargetProtocol(str, Enum):
158
+ class TargetProtocol(str, Enum):
159
159
  r"""Protocol to use when collecting metrics"""
160
160
 
161
161
  HTTP = "http"
@@ -165,7 +165,7 @@ class InputEdgePrometheusTargetProtocol(str, Enum):
165
165
  class TargetTypedDict(TypedDict):
166
166
  host: str
167
167
  r"""Name of host from which to pull metrics."""
168
- protocol: NotRequired[InputEdgePrometheusTargetProtocol]
168
+ protocol: NotRequired[TargetProtocol]
169
169
  r"""Protocol to use when collecting metrics"""
170
170
  port: NotRequired[float]
171
171
  r"""The port number in the metrics URL for discovered targets."""
@@ -177,9 +177,7 @@ class Target(BaseModel):
177
177
  host: str
178
178
  r"""Name of host from which to pull metrics."""
179
179
 
180
- protocol: Optional[InputEdgePrometheusTargetProtocol] = (
181
- InputEdgePrometheusTargetProtocol.HTTP
182
- )
180
+ protocol: Optional[TargetProtocol] = TargetProtocol.HTTP
183
181
  r"""Protocol to use when collecting metrics"""
184
182
 
185
183
  port: Optional[float] = 9090
@@ -197,7 +195,7 @@ class InputEdgePrometheusRecordType(str, Enum):
197
195
  AAAA = "AAAA"
198
196
 
199
197
 
200
- class InputEdgePrometheusScrapeProtocolProtocol(str, Enum):
198
+ class ScrapeProtocolProtocol(str, Enum):
201
199
  r"""Protocol to use when collecting metrics"""
202
200
 
203
201
  HTTP = "http"
@@ -286,7 +284,7 @@ class InputEdgePrometheusTypedDict(TypedDict):
286
284
  r"""List of DNS names to resolve"""
287
285
  record_type: NotRequired[InputEdgePrometheusRecordType]
288
286
  r"""DNS Record type to resolve"""
289
- scrape_protocol: NotRequired[InputEdgePrometheusScrapeProtocolProtocol]
287
+ scrape_protocol: NotRequired[ScrapeProtocolProtocol]
290
288
  r"""Protocol to use when collecting metrics"""
291
289
  scrape_path: NotRequired[str]
292
290
  r"""Path to use when collecting metrics from discovered targets"""
@@ -410,9 +408,8 @@ class InputEdgePrometheus(BaseModel):
410
408
  r"""DNS Record type to resolve"""
411
409
 
412
410
  scrape_protocol: Annotated[
413
- Optional[InputEdgePrometheusScrapeProtocolProtocol],
414
- pydantic.Field(alias="scrapeProtocol"),
415
- ] = InputEdgePrometheusScrapeProtocolProtocol.HTTP
411
+ Optional[ScrapeProtocolProtocol], pydantic.Field(alias="scrapeProtocol")
412
+ ] = ScrapeProtocolProtocol.HTTP
416
413
  r"""Protocol to use when collecting metrics"""
417
414
 
418
415
  scrape_path: Annotated[Optional[str], pydantic.Field(alias="scrapePath")] = (
@@ -1194,12 +1194,12 @@ class InputGrafanaGrafana1(BaseModel):
1194
1194
  description: Optional[str] = None
1195
1195
 
1196
1196
 
1197
- InputGrafanaUnionTypedDict = TypeAliasType(
1198
- "InputGrafanaUnionTypedDict",
1197
+ InputGrafanaTypedDict = TypeAliasType(
1198
+ "InputGrafanaTypedDict",
1199
1199
  Union[InputGrafanaGrafana1TypedDict, InputGrafanaGrafana2TypedDict],
1200
1200
  )
1201
1201
 
1202
1202
 
1203
- InputGrafanaUnion = TypeAliasType(
1204
- "InputGrafanaUnion", Union[InputGrafanaGrafana1, InputGrafanaGrafana2]
1203
+ InputGrafana = TypeAliasType(
1204
+ "InputGrafana", Union[InputGrafanaGrafana1, InputGrafanaGrafana2]
1205
1205
  )
@@ -724,12 +724,12 @@ class InputSyslogSyslog1(BaseModel):
724
724
  r"""When enabled, parses PROXY protocol headers during the TLS handshake. Disable if compatibility issues arise."""
725
725
 
726
726
 
727
- InputSyslogUnionTypedDict = TypeAliasType(
728
- "InputSyslogUnionTypedDict",
727
+ InputSyslogTypedDict = TypeAliasType(
728
+ "InputSyslogTypedDict",
729
729
  Union[InputSyslogSyslog1TypedDict, InputSyslogSyslog2TypedDict],
730
730
  )
731
731
 
732
732
 
733
- InputSyslogUnion = TypeAliasType(
734
- "InputSyslogUnion", Union[InputSyslogSyslog1, InputSyslogSyslog2]
733
+ InputSyslog = TypeAliasType(
734
+ "InputSyslog", Union[InputSyslogSyslog1, InputSyslogSyslog2]
735
735
  )
@@ -200,13 +200,13 @@ class QueryBuilderMode(str, Enum):
200
200
  XML = "xml"
201
201
 
202
202
 
203
- class InputWefSubscriptionMetadatumTypedDict(TypedDict):
203
+ class SubscriptionMetadatumTypedDict(TypedDict):
204
204
  name: str
205
205
  value: str
206
206
  r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
207
207
 
208
208
 
209
- class InputWefSubscriptionMetadatum(BaseModel):
209
+ class SubscriptionMetadatum(BaseModel):
210
210
  name: str
211
211
 
212
212
  value: str
@@ -234,7 +234,7 @@ class SubscriptionTypedDict(TypedDict):
234
234
  locale: NotRequired[str]
235
235
  r"""The RFC-3066 locale the Windows clients should use when sending events. Defaults to \"en-US\"."""
236
236
  query_selector: NotRequired[QueryBuilderMode]
237
- metadata: NotRequired[List[InputWefSubscriptionMetadatumTypedDict]]
237
+ metadata: NotRequired[List[SubscriptionMetadatumTypedDict]]
238
238
  r"""Fields to add to events ingested under this subscription"""
239
239
 
240
240
 
@@ -280,7 +280,7 @@ class Subscription(BaseModel):
280
280
  Optional[QueryBuilderMode], pydantic.Field(alias="querySelector")
281
281
  ] = QueryBuilderMode.SIMPLE
282
282
 
283
- metadata: Optional[List[InputWefSubscriptionMetadatum]] = None
283
+ metadata: Optional[List[SubscriptionMetadatum]] = None
284
284
  r"""Fields to add to events ingested under this subscription"""
285
285
 
286
286
 
@@ -20,21 +20,21 @@ class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
20
20
  SECRET = "secret"
21
21
 
22
22
 
23
- class OutputGooglePubsubFlushPeriodSecType(str, Enum):
23
+ class FlushPeriodSecType(str, Enum):
24
24
  NUMBER = "number"
25
25
 
26
26
 
27
27
  class FlushPeriodSecTypedDict(TypedDict):
28
28
  r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
29
29
 
30
- type: NotRequired[OutputGooglePubsubFlushPeriodSecType]
30
+ type: NotRequired[FlushPeriodSecType]
31
31
  default: NotRequired[float]
32
32
 
33
33
 
34
34
  class FlushPeriodSec(BaseModel):
35
35
  r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
36
36
 
37
- type: Optional[OutputGooglePubsubFlushPeriodSecType] = None
37
+ type: Optional[FlushPeriodSecType] = None
38
38
 
39
39
  default: Optional[float] = None
40
40
 
@@ -124,7 +124,7 @@ class OutputSplunkLbCompressCompression(str, Enum):
124
124
  ALWAYS = "always"
125
125
 
126
126
 
127
- class OutputSplunkLbAuthTokenAuthenticationMethod(str, Enum):
127
+ class IndexerDiscoveryConfigsAuthTokenAuthenticationMethod(str, Enum):
128
128
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
129
129
 
130
130
  MANUAL = "manual"
@@ -132,19 +132,19 @@ class OutputSplunkLbAuthTokenAuthenticationMethod(str, Enum):
132
132
 
133
133
 
134
134
  class OutputSplunkLbAuthTokenTypedDict(TypedDict):
135
- auth_type: NotRequired[OutputSplunkLbAuthTokenAuthenticationMethod]
135
+ auth_type: NotRequired[IndexerDiscoveryConfigsAuthTokenAuthenticationMethod]
136
136
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
137
137
 
138
138
 
139
139
  class OutputSplunkLbAuthToken(BaseModel):
140
140
  auth_type: Annotated[
141
- Optional[OutputSplunkLbAuthTokenAuthenticationMethod],
141
+ Optional[IndexerDiscoveryConfigsAuthTokenAuthenticationMethod],
142
142
  pydantic.Field(alias="authType"),
143
- ] = OutputSplunkLbAuthTokenAuthenticationMethod.MANUAL
143
+ ] = IndexerDiscoveryConfigsAuthTokenAuthenticationMethod.MANUAL
144
144
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
145
145
 
146
146
 
147
- class OutputSplunkLbIndexerDiscoveryConfigsAuthenticationMethod(str, Enum):
147
+ class IndexerDiscoveryConfigsAuthenticationMethod(str, Enum):
148
148
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
149
149
 
150
150
  MANUAL = "manual"
@@ -164,7 +164,7 @@ class IndexerDiscoveryConfigsTypedDict(TypedDict):
164
164
  r"""During indexer discovery, reject cluster manager certificates that are not authorized by the system's CA. Disable to allow untrusted (for example, self-signed) certificates."""
165
165
  auth_tokens: NotRequired[List[OutputSplunkLbAuthTokenTypedDict]]
166
166
  r"""Tokens required to authenticate to cluster manager for indexer discovery"""
167
- auth_type: NotRequired[OutputSplunkLbIndexerDiscoveryConfigsAuthenticationMethod]
167
+ auth_type: NotRequired[IndexerDiscoveryConfigsAuthenticationMethod]
168
168
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
169
169
  auth_token: NotRequired[str]
170
170
  r"""Shared secret to be provided by any client (in authToken header field). If empty, unauthorized access is permitted."""
@@ -197,9 +197,9 @@ class IndexerDiscoveryConfigs(BaseModel):
197
197
  r"""Tokens required to authenticate to cluster manager for indexer discovery"""
198
198
 
199
199
  auth_type: Annotated[
200
- Optional[OutputSplunkLbIndexerDiscoveryConfigsAuthenticationMethod],
200
+ Optional[IndexerDiscoveryConfigsAuthenticationMethod],
201
201
  pydantic.Field(alias="authType"),
202
- ] = OutputSplunkLbIndexerDiscoveryConfigsAuthenticationMethod.MANUAL
202
+ ] = IndexerDiscoveryConfigsAuthenticationMethod.MANUAL
203
203
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
204
204
 
205
205
  auth_token: Annotated[Optional[str], pydantic.Field(alias="authToken")] = ""
@@ -2,7 +2,6 @@
2
2
 
3
3
  from __future__ import annotations
4
4
  from .routesroute import RoutesRoute, RoutesRouteTypedDict
5
- from .routesroute_input import RoutesRouteInput, RoutesRouteInputTypedDict
6
5
  from cribl_control_plane.types import BaseModel
7
6
  import pydantic
8
7
  from pydantic import ConfigDict
@@ -72,26 +71,3 @@ class Routes(BaseModel):
72
71
 
73
72
  comments: Optional[List[Comment]] = None
74
73
  r"""Comments"""
75
-
76
-
77
- class RoutesInputTypedDict(TypedDict):
78
- routes: List[RoutesRouteInputTypedDict]
79
- r"""Pipeline routing rules"""
80
- id: NotRequired[str]
81
- r"""Routes ID"""
82
- groups: NotRequired[Dict[str, RoutesGroupsTypedDict]]
83
- comments: NotRequired[List[CommentTypedDict]]
84
- r"""Comments"""
85
-
86
-
87
- class RoutesInput(BaseModel):
88
- routes: List[RoutesRouteInput]
89
- r"""Pipeline routing rules"""
90
-
91
- id: Optional[str] = None
92
- r"""Routes ID"""
93
-
94
- groups: Optional[Dict[str, RoutesGroups]] = None
95
-
96
- comments: Optional[List[Comment]] = None
97
- r"""Comments"""
@@ -0,0 +1,48 @@
1
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
+
3
+ from __future__ import annotations
4
+ from .configgroup import ConfigGroup, ConfigGroupTypedDict
5
+ from cribl_control_plane.types import BaseModel
6
+ from cribl_control_plane.utils import FieldMetadata, PathParamMetadata, RequestMetadata
7
+ import pydantic
8
+ from typing import List, Optional
9
+ from typing_extensions import Annotated, NotRequired, TypedDict
10
+
11
+
12
+ class UpdateGroupsByIDRequestTypedDict(TypedDict):
13
+ id_param: str
14
+ r"""Group ID"""
15
+ config_group: ConfigGroupTypedDict
16
+ r"""ConfigGroup object"""
17
+
18
+
19
+ class UpdateGroupsByIDRequest(BaseModel):
20
+ id_param: Annotated[
21
+ str,
22
+ pydantic.Field(alias="id"),
23
+ FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
24
+ ]
25
+ r"""Group ID"""
26
+
27
+ config_group: Annotated[
28
+ ConfigGroup,
29
+ FieldMetadata(request=RequestMetadata(media_type="application/json")),
30
+ ]
31
+ r"""ConfigGroup object"""
32
+
33
+
34
+ class UpdateGroupsByIDResponseTypedDict(TypedDict):
35
+ r"""a list of ConfigGroup objects"""
36
+
37
+ count: NotRequired[int]
38
+ r"""number of items present in the items array"""
39
+ items: NotRequired[List[ConfigGroupTypedDict]]
40
+
41
+
42
+ class UpdateGroupsByIDResponse(BaseModel):
43
+ r"""a list of ConfigGroup objects"""
44
+
45
+ count: Optional[int] = None
46
+ r"""number of items present in the items array"""
47
+
48
+ items: Optional[List[ConfigGroup]] = None
@@ -12,7 +12,7 @@ class UpdateInputByIDRequestTypedDict(TypedDict):
12
12
  id: str
13
13
  r"""Unique ID to PATCH"""
14
14
  input: InputTypedDict
15
- r"""Source object to be updated"""
15
+ r"""Input object"""
16
16
 
17
17
 
18
18
  class UpdateInputByIDRequest(BaseModel):
@@ -24,7 +24,7 @@ class UpdateInputByIDRequest(BaseModel):
24
24
  input: Annotated[
25
25
  Input, FieldMetadata(request=RequestMetadata(media_type="application/json"))
26
26
  ]
27
- r"""Source object to be updated"""
27
+ r"""Input object"""
28
28
 
29
29
 
30
30
  class UpdateInputByIDResponseTypedDict(TypedDict):
@@ -12,7 +12,7 @@ class UpdateOutputByIDRequestTypedDict(TypedDict):
12
12
  id: str
13
13
  r"""Unique ID to PATCH"""
14
14
  output: OutputTypedDict
15
- r"""Destination object to be updated"""
15
+ r"""Output object"""
16
16
 
17
17
 
18
18
  class UpdateOutputByIDRequest(BaseModel):
@@ -24,7 +24,7 @@ class UpdateOutputByIDRequest(BaseModel):
24
24
  output: Annotated[
25
25
  Output, FieldMetadata(request=RequestMetadata(media_type="application/json"))
26
26
  ]
27
- r"""Destination object to be updated"""
27
+ r"""Output object"""
28
28
 
29
29
 
30
30
  class UpdateOutputByIDResponseTypedDict(TypedDict):
@@ -1,7 +1,7 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from .routes import Routes, RoutesInput, RoutesInputTypedDict, RoutesTypedDict
4
+ from .routes import Routes, RoutesTypedDict
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import FieldMetadata, PathParamMetadata, RequestMetadata
7
7
  import pydantic
@@ -12,7 +12,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict
12
12
  class UpdateRoutesByIDRequestTypedDict(TypedDict):
13
13
  id_param: str
14
14
  r"""Unique ID to PATCH"""
15
- routes: RoutesInputTypedDict
15
+ routes: RoutesTypedDict
16
16
  r"""Routes object to be updated"""
17
17
 
18
18
 
@@ -25,8 +25,7 @@ class UpdateRoutesByIDRequest(BaseModel):
25
25
  r"""Unique ID to PATCH"""
26
26
 
27
27
  routes: Annotated[
28
- RoutesInput,
29
- FieldMetadata(request=RequestMetadata(media_type="application/json")),
28
+ Routes, FieldMetadata(request=RequestMetadata(media_type="application/json"))
30
29
  ]
31
30
  r"""Routes object to be updated"""
32
31
 
@@ -448,210 +448,6 @@ class Packs(BaseSDK):
448
448
 
449
449
  raise errors.APIError("Unexpected response received", http_res)
450
450
 
451
- def update_packs(
452
- self,
453
- *,
454
- filename: str,
455
- size: int,
456
- file: Union[models.UpdatePacksFile, models.UpdatePacksFileTypedDict],
457
- retries: OptionalNullable[utils.RetryConfig] = UNSET,
458
- server_url: Optional[str] = None,
459
- timeout_ms: Optional[int] = None,
460
- http_headers: Optional[Mapping[str, str]] = None,
461
- ) -> models.UpdatePacksResponse:
462
- r"""Upload Pack
463
-
464
- Upload Pack
465
-
466
- :param filename: the file to upload
467
- :param size: Size of the pack file in bytes
468
- :param file: The pack file to upload
469
- :param retries: Override the default retry configuration for this method
470
- :param server_url: Override the default server URL for this method
471
- :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
472
- :param http_headers: Additional headers to set or replace on requests.
473
- """
474
- base_url = None
475
- url_variables = None
476
- if timeout_ms is None:
477
- timeout_ms = self.sdk_configuration.timeout_ms
478
-
479
- if server_url is not None:
480
- base_url = server_url
481
- else:
482
- base_url = self._get_url(base_url, url_variables)
483
-
484
- request = models.UpdatePacksRequest(
485
- filename=filename,
486
- size=size,
487
- request_body=models.UpdatePacksRequestBody(
488
- file=utils.get_pydantic_model(file, models.UpdatePacksFile),
489
- ),
490
- )
491
-
492
- req = self._build_request(
493
- method="PUT",
494
- path="/packs",
495
- base_url=base_url,
496
- url_variables=url_variables,
497
- request=request,
498
- request_body_required=True,
499
- request_has_path_params=False,
500
- request_has_query_params=True,
501
- user_agent_header="user-agent",
502
- accept_header_value="application/json",
503
- http_headers=http_headers,
504
- security=self.sdk_configuration.security,
505
- get_serialized_body=lambda: utils.serialize_request_body(
506
- request.request_body,
507
- False,
508
- False,
509
- "multipart",
510
- models.UpdatePacksRequestBody,
511
- ),
512
- timeout_ms=timeout_ms,
513
- )
514
-
515
- if retries == UNSET:
516
- if self.sdk_configuration.retry_config is not UNSET:
517
- retries = self.sdk_configuration.retry_config
518
-
519
- retry_config = None
520
- if isinstance(retries, utils.RetryConfig):
521
- retry_config = (retries, ["429", "500", "502", "503", "504"])
522
-
523
- http_res = self.do_request(
524
- hook_ctx=HookContext(
525
- config=self.sdk_configuration,
526
- base_url=base_url or "",
527
- operation_id="updatePacks",
528
- oauth2_scopes=[],
529
- security_source=get_security_from_env(
530
- self.sdk_configuration.security, models.Security
531
- ),
532
- ),
533
- request=req,
534
- error_status_codes=["401", "4XX", "500", "5XX"],
535
- retry_config=retry_config,
536
- )
537
-
538
- response_data: Any = None
539
- if utils.match_response(http_res, "200", "application/json"):
540
- return unmarshal_json_response(models.UpdatePacksResponse, http_res)
541
- if utils.match_response(http_res, "500", "application/json"):
542
- response_data = unmarshal_json_response(errors.ErrorData, http_res)
543
- raise errors.Error(response_data, http_res)
544
- if utils.match_response(http_res, ["401", "4XX"], "*"):
545
- http_res_text = utils.stream_to_text(http_res)
546
- raise errors.APIError("API error occurred", http_res, http_res_text)
547
- if utils.match_response(http_res, "5XX", "*"):
548
- http_res_text = utils.stream_to_text(http_res)
549
- raise errors.APIError("API error occurred", http_res, http_res_text)
550
-
551
- raise errors.APIError("Unexpected response received", http_res)
552
-
553
- async def update_packs_async(
554
- self,
555
- *,
556
- filename: str,
557
- size: int,
558
- file: Union[models.UpdatePacksFile, models.UpdatePacksFileTypedDict],
559
- retries: OptionalNullable[utils.RetryConfig] = UNSET,
560
- server_url: Optional[str] = None,
561
- timeout_ms: Optional[int] = None,
562
- http_headers: Optional[Mapping[str, str]] = None,
563
- ) -> models.UpdatePacksResponse:
564
- r"""Upload Pack
565
-
566
- Upload Pack
567
-
568
- :param filename: the file to upload
569
- :param size: Size of the pack file in bytes
570
- :param file: The pack file to upload
571
- :param retries: Override the default retry configuration for this method
572
- :param server_url: Override the default server URL for this method
573
- :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
574
- :param http_headers: Additional headers to set or replace on requests.
575
- """
576
- base_url = None
577
- url_variables = None
578
- if timeout_ms is None:
579
- timeout_ms = self.sdk_configuration.timeout_ms
580
-
581
- if server_url is not None:
582
- base_url = server_url
583
- else:
584
- base_url = self._get_url(base_url, url_variables)
585
-
586
- request = models.UpdatePacksRequest(
587
- filename=filename,
588
- size=size,
589
- request_body=models.UpdatePacksRequestBody(
590
- file=utils.get_pydantic_model(file, models.UpdatePacksFile),
591
- ),
592
- )
593
-
594
- req = self._build_request_async(
595
- method="PUT",
596
- path="/packs",
597
- base_url=base_url,
598
- url_variables=url_variables,
599
- request=request,
600
- request_body_required=True,
601
- request_has_path_params=False,
602
- request_has_query_params=True,
603
- user_agent_header="user-agent",
604
- accept_header_value="application/json",
605
- http_headers=http_headers,
606
- security=self.sdk_configuration.security,
607
- get_serialized_body=lambda: utils.serialize_request_body(
608
- request.request_body,
609
- False,
610
- False,
611
- "multipart",
612
- models.UpdatePacksRequestBody,
613
- ),
614
- timeout_ms=timeout_ms,
615
- )
616
-
617
- if retries == UNSET:
618
- if self.sdk_configuration.retry_config is not UNSET:
619
- retries = self.sdk_configuration.retry_config
620
-
621
- retry_config = None
622
- if isinstance(retries, utils.RetryConfig):
623
- retry_config = (retries, ["429", "500", "502", "503", "504"])
624
-
625
- http_res = await self.do_request_async(
626
- hook_ctx=HookContext(
627
- config=self.sdk_configuration,
628
- base_url=base_url or "",
629
- operation_id="updatePacks",
630
- oauth2_scopes=[],
631
- security_source=get_security_from_env(
632
- self.sdk_configuration.security, models.Security
633
- ),
634
- ),
635
- request=req,
636
- error_status_codes=["401", "4XX", "500", "5XX"],
637
- retry_config=retry_config,
638
- )
639
-
640
- response_data: Any = None
641
- if utils.match_response(http_res, "200", "application/json"):
642
- return unmarshal_json_response(models.UpdatePacksResponse, http_res)
643
- if utils.match_response(http_res, "500", "application/json"):
644
- response_data = unmarshal_json_response(errors.ErrorData, http_res)
645
- raise errors.Error(response_data, http_res)
646
- if utils.match_response(http_res, ["401", "4XX"], "*"):
647
- http_res_text = await utils.stream_to_text_async(http_res)
648
- raise errors.APIError("API error occurred", http_res, http_res_text)
649
- if utils.match_response(http_res, "5XX", "*"):
650
- http_res_text = await utils.stream_to_text_async(http_res)
651
- raise errors.APIError("API error occurred", http_res, http_res_text)
652
-
653
- raise errors.APIError("Unexpected response received", http_res)
654
-
655
451
  def delete_packs_by_id(
656
452
  self,
657
453
  *,