peak-sdk 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- peak/__init__.py +36 -0
- peak/_version.py +21 -0
- peak/auth.py +22 -0
- peak/base_client.py +52 -0
- peak/cli/__init__.py +20 -0
- peak/cli/args.py +84 -0
- peak/cli/cli.py +56 -0
- peak/cli/helpers.py +187 -0
- peak/cli/press/__init__.py +21 -0
- peak/cli/press/apps/__init__.py +40 -0
- peak/cli/press/apps/deployments.py +238 -0
- peak/cli/press/apps/specs.py +387 -0
- peak/cli/press/blocks/__init__.py +40 -0
- peak/cli/press/blocks/deployments.py +240 -0
- peak/cli/press/blocks/specs.py +492 -0
- peak/cli/press/deployments.py +78 -0
- peak/cli/press/specs.py +131 -0
- peak/cli/resources/__init__.py +21 -0
- peak/cli/resources/artifacts.py +310 -0
- peak/cli/resources/images.py +886 -0
- peak/cli/resources/webapps.py +356 -0
- peak/cli/resources/workflows.py +703 -0
- peak/cli/ruff.toml +11 -0
- peak/cli/version.py +49 -0
- peak/compression.py +162 -0
- peak/config.py +24 -0
- peak/constants.py +105 -0
- peak/exceptions.py +217 -0
- peak/handler.py +358 -0
- peak/helpers.py +184 -0
- peak/logger.py +48 -0
- peak/press/__init__.py +28 -0
- peak/press/apps.py +669 -0
- peak/press/blocks.py +707 -0
- peak/press/deployments.py +145 -0
- peak/press/specs.py +260 -0
- peak/py.typed +0 -0
- peak/resources/__init__.py +28 -0
- peak/resources/artifacts.py +343 -0
- peak/resources/images.py +675 -0
- peak/resources/webapps.py +278 -0
- peak/resources/workflows.py +625 -0
- peak/session.py +259 -0
- peak/telemetry.py +201 -0
- peak/template.py +231 -0
- peak/validators.py +48 -0
- peak_sdk-1.0.0.dist-info/LICENSE +201 -0
- peak_sdk-1.0.0.dist-info/METADATA +199 -0
- peak_sdk-1.0.0.dist-info/RECORD +51 -0
- peak_sdk-1.0.0.dist-info/WHEEL +4 -0
- peak_sdk-1.0.0.dist-info/entry_points.txt +3 -0
peak/handler.py
ADDED
@@ -0,0 +1,358 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Handler for sending requests to the API."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
import contextlib
|
25
|
+
from abc import ABC, ABCMeta, abstractmethod
|
26
|
+
from typing import Any, Callable, ClassVar, Dict, Iterator, List, Optional, Tuple, Type, TypeVar, Union
|
27
|
+
|
28
|
+
import requests
|
29
|
+
from requests.adapters import HTTPAdapter
|
30
|
+
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor, user_agent
|
31
|
+
from rich.progress import BarColumn, Progress, TaskProgressColumn, TextColumn
|
32
|
+
from urllib3.util import Retry
|
33
|
+
|
34
|
+
from peak.compression import compress
|
35
|
+
from peak.constants import ContentType, HttpMethods
|
36
|
+
from peak.exceptions import BaseHttpException
|
37
|
+
from peak.telemetry import telemetry
|
38
|
+
from peak.validators import check_file_size
|
39
|
+
|
40
|
+
from ._version import __version__
|
41
|
+
|
42
|
+
# Generic placeholder type used by handler-related typing.
T = TypeVar("T")
# Flat mapping of primitive values; used for extra per-request keyword
# arguments that must be JSON-serializable.
Serializable = Dict[str, Union[str, int, float, bool]]
OptionalSerializable = Optional[Serializable]
|
45
|
+
|
46
|
+
|
47
|
+
class HandlerRegistryMeta(type):
    """Metaclass for registering all types of Handler classes."""

    # Maps a ContentType to a ready-to-use handler *instance* (note that
    # __new__ stores new_cls(), so registration instantiates the class).
    REGISTRY: Dict[ContentType, BaseHandler] = {}

    def __new__(
        cls: "Type[HandlerRegistryMeta]",
        name: str,
        bases: Tuple[Any, ...],
        attrs: Dict[str, Any],
    ) -> HandlerRegistryMeta:
        """This method runs whenever a new class (that uses this class as its metaclass) is defined.

        This method automatically adds the handler classes to its Registry.
        It uses the `CONTENT_TYPE` attribute of the class as key and an instance of the class as value in registry.
        Classes without a `CONTENT_TYPE` attribute (e.g. abstract bases) are left unregistered.
        Ref: https://charlesreid1.github.io/python-patterns-the-registry.html

        Args:
            name (str): Name of the child class
            bases (tuple): Tuple of the child class's inheritance tree
            attrs (dict): Name and value pairs of all the attributes defined in the child class

        Returns:
            HandlerRegistryMeta: the class itself, forward annotated for type checking

        Raises:
            TypeError: if the child class's `CONTENT_TYPE` attribute is not a valid `ContentType`
        """
        error_invalid_content_type: str = f"Invalid content type for {name} handler"
        new_cls: "HandlerRegistryMeta" = type.__new__(cls, name, bases, attrs)

        # Only classes declaring CONTENT_TYPE are registered; others fall
        # through to the plain return in the `else` branch.
        content_type: Optional[ContentType] = attrs.get("CONTENT_TYPE", None)
        try:
            # ContentType(content_type) raises ValueError for invalid values,
            # which is translated into a TypeError below.
            if content_type and ContentType(content_type):
                cls.REGISTRY[content_type] = new_cls()
        except ValueError as err:
            raise TypeError(error_invalid_content_type) from err
        else:
            return new_cls
|
86
|
+
|
87
|
+
|
88
|
+
class _CombinedMeta(HandlerRegistryMeta, ABCMeta):
    """Merges the registry metaclass with ABCMeta so handlers can be both abstract and auto-registered."""
|
92
|
+
|
93
|
+
|
94
|
+
class AuthRetrySession(requests.Session):
    """A ``requests.Session`` pre-wired with retry support for transient server errors."""

    # Kept as a class attribute so tests can patch it (e.g. shrink the
    # backoff_factor) to speed up the suite.
    _DEFAULT_RETRY_CONFIG: Dict[str, Any] = {
        "backoff_factor": 2,
        "total": 5,
        "status_forcelist": [500, 502, 503, 504],
    }

    def _add_retries(self, retry_config: Optional[Dict[str, Any]] = None) -> None:
        # Fall back to the class default when no explicit config is supplied.
        effective_config = self._DEFAULT_RETRY_CONFIG if retry_config is None else retry_config
        retry_adapter = HTTPAdapter(max_retries=Retry(**effective_config))
        # Only HTTPS endpoints get the retrying adapter.
        self.mount("https://", adapter=retry_adapter)
|
109
|
+
|
110
|
+
|
111
|
+
class HandlerUtils(AuthRetrySession):
    """Utility class for handling requests."""

    @contextlib.contextmanager
    def make_artifact(
        self,
        path: Optional[str],
        body: Dict[str, Any],
        ignore_files: Optional[list[str]],
    ) -> Iterator[MultipartEncoderMonitor]:
        """Create a multipart/form-data encoded file with given body and path as file.

        Args:
            path (Optional[str]): path to the file or folder that will be compressed and used as artifact
            ignore_files(Optional[list[str]]): Ignore files to be used when creating artifact
            body (Dict[str, Any]): body content to be sent with artifact

        Yields:
            MultipartEncoderMonitor: MultipartEncoderMonitor generator object
        """
        if path:
            # Compress the path into a zip file handle, validate its size, and
            # stream it as the "artifact" part alongside the body fields.
            with compress(path, ignore_files) as fh:
                check_file_size(fh)
                encoder = MultipartEncoder(
                    {
                        **body,
                        "artifact": (
                            "artifact.zip",
                            fh,
                            "application/zip",
                        ),
                    },
                )
                # Attach a progress-bar callback that fires as bytes are read.
                callback = self._default_callback(encoder)
                monitor = MultipartEncoderMonitor(encoder, callback)
                yield monitor
        else:
            # No artifact: send only the body fields, without a progress bar.
            encoder = MultipartEncoder({**body})
            monitor = MultipartEncoderMonitor(encoder)
            yield monitor

    # Override the return annotation seen by introspection tools: the
    # @contextmanager decorator turns the Iterator above into a context manager.
    make_artifact.__annotations__["return"] = contextlib.AbstractContextManager

    def _default_callback(self, encoder: MultipartEncoder) -> Callable[[MultipartEncoderMonitor], None]:
        # Build a rich progress bar sized to the full multipart payload.
        progress = Progress(
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
        )
        progress.start()
        bar = progress.add_task("[green]Uploading", total=encoder.len)

        def callback(monitor: MultipartEncoderMonitor) -> None:
            progress.update(bar, completed=monitor.bytes_read, refresh=True)
            progress.refresh()
            # Once the entire payload has been read, stop and clear the display.
            if monitor.bytes_read >= monitor.len:
                progress.stop()
                progress.console.clear_live()

        return callback

    @staticmethod
    def parse_args(arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Parse arguments dict and remove the parameters whose values are not provided.

        Args:
            arguments (Dict[str, Any]): dictionary of arguments

        Returns:
            Dict[str, Any]: filtered dictionary where value is not None
        """
        return {k: v for k, v in arguments.items() if v is not None}

    @staticmethod
    def handle_response(response: requests.Response) -> requests.Response:
        """Handles the response from the API.

        Args:
            response (requests.Response): response object from the API

        Returns:
            requests.Response: response of the request.

        # noqa: DAR401
        Raises:
            BaseHttpException: The HTTP exception based on status code
        """
        if 200 <= response.status_code < 300:  # noqa: PLR2004
            return response

        # Non-2xx: raise the exception class registered for this status code.
        # NOTE(review): assumes the error body is JSON — response.json() will
        # itself raise on a non-JSON body; confirm the API always returns JSON.
        raise BaseHttpException.REGISTRY[response.status_code](response.json())
|
202
|
+
|
203
|
+
|
204
|
+
class BaseHandler(ABC, HandlerUtils, metaclass=_CombinedMeta):
    """Common contract that every content-type specific handler implements."""

    # Concrete subclasses must declare the content type they serve; the
    # metaclass uses it as the key when registering the handler.
    CONTENT_TYPE: ClassVar[ContentType]

    @abstractmethod
    def handle(
        self,
        url: str,
        method: HttpMethods,
        *,
        headers: Dict[str, str],
        params: Optional[Dict[str, Any]],
        body: Optional[Dict[str, Any]],
        path: Optional[str],
        ignore_files: Optional[list[str]],
        request_kwargs: OptionalSerializable,
    ) -> requests.Response:
        """Send the request and return the validated response."""
|
224
|
+
|
225
|
+
|
226
|
+
class MultipartFormDataHandler(BaseHandler):
    """Handles requests with multipart/form-data content type."""

    CONTENT_TYPE = ContentType.MULTIPART_FORM_DATA

    def handle(
        self,
        url: str,
        method: HttpMethods,
        *,
        path: Optional[str],
        headers: Dict[str, str],
        body: Optional[Dict[str, Any]],
        params: Optional[Dict[str, Any]] = None,  # noqa: ARG002
        ignore_files: Optional[list[str]] = None,
        request_kwargs: OptionalSerializable = None,
    ) -> requests.Response:
        """Handle multipart/form-data requests.

        Args:
            url (str): url to send the request to
            method (HttpMethods): method to use for the request, e.g. get, post, put, delete
            headers (Dict[str, str]): headers to send with the request
            params (Dict[str, Any]): params to send to the request, not used for multipart/form-data
            body (Dict[str, Any]): body to send to the request
            path (Optional[str]): path to the file or folder that will be compressed and used as artifact
            request_kwargs (OptionalSerializable): extra arguments to be passed when making the request, defaults to None
            ignore_files(Optional[list[str]]): Ignore files to be used when creating artifact

        Returns:
            requests.Response: response of the request
        """
        # Fix: the default of None previously crashed with a TypeError when it
        # was unpacked as **request_kwargs below; normalize to an empty dict.
        request_kwargs = request_kwargs or {}
        with self.make_artifact(path, self.parse_args(body or {}), ignore_files) as monitor:
            # The monitor supplies the multipart boundary in its content type.
            headers = {**headers, "Content-Type": monitor.content_type}
            response: Any = getattr(requests, method.value)(url, data=monitor, headers=headers, **request_kwargs)
            return self.handle_response(response)
|
262
|
+
|
263
|
+
|
264
|
+
class ApplicationJsonHandler(BaseHandler):
    """Handles requests with application/json content type."""

    CONTENT_TYPE = ContentType.APPLICATION_JSON

    def handle(
        self,
        url: str,
        method: HttpMethods,
        *,
        headers: Dict[str, str],
        params: Optional[Dict[str, Any]],
        body: Optional[Dict[str, Any]],
        path: Optional[str] = None,  # noqa: ARG002
        ignore_files: Optional[list[str]] = None,  # noqa: ARG002
        request_kwargs: OptionalSerializable = None,
    ) -> requests.Response:
        """Handle application/json requests.

        Args:
            url (str): url to send the request to
            method (HttpMethods): method to use for the request, e.g. get, post, put, delete
            headers (Dict[str, str]): headers to send with the request
            params (Dict[str, Any]): params to send to the request
            body (Dict[str, Any]): body to send to the request
            path (Optional[str]): not used by this handler; accepted for interface parity
            request_kwargs (OptionalSerializable): extra arguments to be passed when making the request, defaults to None
            ignore_files(Optional[list[str]]): not used by this handler; accepted for interface parity

        Returns:
            requests.Response: response of the request.
        """
        # Fix: the default of None previously crashed with a TypeError when it
        # was unpacked as **request_kwargs below; normalize to an empty dict.
        request_kwargs = request_kwargs or {}
        headers = {**headers, "Content-Type": self.CONTENT_TYPE.value}
        response: Any = getattr(requests, method.value)(
            url,
            params=self.parse_args(params or {}),
            json=body,
            headers=headers,
            **request_kwargs,
        )
        return self.handle_response(response)
|
305
|
+
|
306
|
+
|
307
|
+
class Handler:
    """Entry point that dispatches API requests to the content-type specific handlers."""

    USER_AGENT: str = user_agent(__package__ or __name__, __version__)

    @telemetry
    def make_request(
        self,
        url: str,
        method: HttpMethods,
        content_type: ContentType,
        headers: Optional[Dict[str, str]] = None,
        params: Optional[Dict[str, Any]] = None,
        body: Optional[Dict[str, Any]] = None,
        path: Optional[str] = None,
        request_kwargs: Optional[Dict[str, int | bool | str | float]] = None,
        ignore_files: Optional[list[str]] = None,
    ) -> requests.Response:
        """Redirects the request to the appropriate strategy based on the content type.

        Args:
            url (str): url to send the request to
            method (HttpMethods): The HTTP method to use, e.g. get, post, put, delete
            content_type (ContentType): content type of the request
            headers (Dict[str, str]): headers to send with the request
            params (Dict[str, Any]): params to send to the request
            body (Dict[str, Any]): body to send to the request
            path (Optional[str]): path to the file or folder that will be compressed and used as artifact, defaults to None
            request_kwargs(Dict[str, int | bool | str | float] | None): extra arguments to be passed when making the request.
            ignore_files(Optional[list[str]]): Ignore files to be used when creating artifact

        Returns:
            requests.Response: response json
        """
        # The SDK's user agent always wins over any caller-supplied value.
        merged_headers = dict(headers or {})
        merged_headers["User-Agent"] = self.USER_AGENT

        dispatcher = BaseHandler.REGISTRY[content_type]
        return dispatcher.handle(
            url=url,
            method=method,
            headers=merged_headers,
            params=params or {},
            body=body or {},
            path=path,
            request_kwargs=request_kwargs or {},
            ignore_files=ignore_files,
        )
|
356
|
+
|
357
|
+
|
358
|
+
# Public API of this module.
__all__: List[str] = ["Handler", "ApplicationJsonHandler", "MultipartFormDataHandler"]
|
peak/helpers.py
ADDED
@@ -0,0 +1,184 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
|
22
|
+
"""Collection of basic helper functions."""
|
23
|
+
from __future__ import annotations
|
24
|
+
|
25
|
+
import inspect
|
26
|
+
import json
|
27
|
+
from types import FrameType
|
28
|
+
from typing import Any, Dict, List, Optional
|
29
|
+
|
30
|
+
|
31
|
+
def parse_body_for_multipart_request(body: Dict[str, Any]) -> Dict[str, str]:
    """Parses an object to make it suitable for passing in a multipart request.

    String values are passed through unchanged; every other value is JSON
    encoded so the multipart encoder only ever receives strings.

    Args:
        body (Dict[str, Any]): the object to be parsed

    Returns:
        Dict[str, str]: the parsed object
    """
    # isinstance (rather than a `type(value) == str` comparison) is the
    # idiomatic check and also accepts str subclasses.
    return {key: value if isinstance(value, str) else json.dumps(value) for key, value in body.items()}
|
41
|
+
|
42
|
+
|
43
|
+
def remove_keys(body: Dict[str, Any], keys: List[str]) -> Dict[str, Any]:
    """Return a copy of *body* without the given keys.

    Args:
        body (Dict[str, Any]): the source dictionary
        keys (List[str]): the keys to remove

    Returns:
        Dict[str, Any]: a new dictionary with the requested keys removed
    """
    # A set makes each membership test O(1).
    excluded = set(keys)
    filtered: Dict[str, Any] = {}
    for name, value in body.items():
        if name not in excluded:
            filtered[name] = value
    return filtered
|
54
|
+
|
55
|
+
|
56
|
+
def get_base_domain(stage: str, subdomain: Optional[str] = "service") -> str:
    """Build the base domain for a stage with the given subdomain.

    Args:
        stage (str): the stage (e.g. "prod", "latest", "test")
        subdomain (Optional[str]): the subdomain

    Returns:
        str: the final base domain
    """
    # "prod" URLs carry no stage segment at all; "latest" maps onto "dev".
    stage_aliases = {"prod": "", "latest": "dev"}
    resolved_stage = stage_aliases.get(stage, stage)
    # Collapsing ".." removes the empty segment left by the prod mapping.
    return f"https://{subdomain}.{resolved_stage}.peak.ai".replace("..", ".")
|
74
|
+
|
75
|
+
|
76
|
+
def parse_list_of_strings(param: List[str] | None) -> List[str] | None:
    """Split comma separated strings in the list and flatten that list.

    Args:
        param (List[str] | None): List of (possibly comma-separated) strings

    Returns:
        List[str] | None: the flattened list, or the input unchanged when it
            is None or empty
    """
    # Preserve the original None/empty-list return value exactly.
    if not param:
        return param

    # Single pass instead of repeated `result = result + ...` concatenation,
    # which copied the accumulator on every iteration (quadratic).
    return [part for entry in param for part in entry.split(",")]
|
93
|
+
|
94
|
+
|
95
|
+
def snake_case_to_lower_camel_case(snake_case_string: str) -> str:
    """Convert an underscore-separated string to lower camel case.

    Args:
        snake_case_string (str): string in underscore form

    Returns:
        str: lower camel case string
    """
    words = snake_case_string.split("_")
    # The leading word keeps its original casing; each following word is
    # capitalized (first letter upper, the rest lowered).
    camel = words[0]
    for word in words[1:]:
        camel += word.capitalize()
    return camel
|
106
|
+
|
107
|
+
|
108
|
+
def variables_to_dict(*args: Any, frame: FrameType | None = None) -> Dict[str, str]:
    """Converts arbitrary variables to a dictionary keyed by their camel-cased names.

    Inspects the (caller's) frame locals and, for every local whose value
    matches one of *args*, records it under the lower-camel-case form of the
    variable's name.

    Args:
        args (str|int): tuple of string|int variables
        frame (FrameType|None): Current Frame of caller; when None the frame
            is discovered via inspect.

    Returns:
        Dict[str, str]: Dictionary containing key value pair of variables
    """
    if frame is None:
        frame = inspect.currentframe()
    if frame:
        # set frame to previous (i.e. step out of this function's own frame)
        frame = frame.f_back

    var_dict = {}
    if frame and frame.f_locals:
        for var_name, var_value in frame.f_locals.items():
            # NOTE(review): membership uses equality, so any caller local equal
            # to a passed value is captured; the `and var_value` guard also
            # silently skips falsy values (0, "", None) — confirm both are intended.
            if var_value in args and var_value:
                var_dict[snake_case_to_lower_camel_case(var_name)] = var_value

    del frame  # Explicitly release the frame to avoid reference cycles
    return var_dict
|
132
|
+
|
133
|
+
|
134
|
+
def combine_dictionaries(
    dict1: Dict[str, Any],
    dict2: Dict[str, Any],
    nested_keys_to_skip: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Combines two dictionaries. Values from the second dictionary take precedence.

    Nested dictionaries are merged recursively, except for the top-level keys
    listed in ``nested_keys_to_skip``, whose values are overwritten wholesale.

    Args:
        dict1 (Dict[str, Any]): dictionary 1
        dict2 (Dict[str, Any]): dictionary 2
        nested_keys_to_skip (List[str] | None): Keys for which nested combining
            is not required. Defaults to None (combine everything).

    Returns:
        Dict[str, Any]: Combined dictionary
    """
    if not dict1:
        return dict2

    # None default replaces the previous mutable `[]` default (anti-pattern);
    # behavior for callers is unchanged.
    skip_keys = nested_keys_to_skip or []
    combined_dict = dict(dict1)
    for key in dict2:
        # NOTE: skip keys apply only at this level; the recursive call merges
        # all nested keys, matching the original behavior.
        if key in combined_dict and isinstance(combined_dict[key], dict) and key not in skip_keys:
            combined_dict[key] = combine_dictionaries(combined_dict[key], dict2[key])
        else:
            combined_dict[key] = dict2[key]
    return combined_dict
|
159
|
+
|
160
|
+
|
161
|
+
def map_user_options(
    user_options: Dict[str, Any],
    mapping: Dict[str, str],
    dict_type_keys: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Maps user provided inputs to a specific (possibly nested) format.

    Keys present in ``mapping`` are grouped under the mapped parent key; all
    other keys are copied through at the top level.

    Args:
        user_options (Dict[str, Any]): Dictionary containing user inputs
        mapping (Dict[str, str]): Mapping to be used for conversion
        dict_type_keys (List[str] | None): List of keys which have json type
            values. Defaults to None.

    Returns:
        Dict[str, Any]: Mapped dictionary
    """
    # None default replaces the previous mutable `[]` default (anti-pattern);
    # behavior for callers is unchanged.
    json_keys = dict_type_keys or []
    result: Dict[str, Any] = {}
    for key, raw_value in user_options.items():
        # Decode JSON-string values exactly once, regardless of branch.
        value = json.loads(raw_value) if key in json_keys else raw_value
        if key in mapping:
            result.setdefault(mapping[key], {})[key] = value
        else:
            result[key] = value
    return result
|
peak/logger.py
ADDED
@@ -0,0 +1,48 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Logger for the Peak SDK."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
import logging
|
25
|
+
import sys
|
26
|
+
from typing import List
|
27
|
+
|
28
|
+
from peak.constants import LOG_FORMAT, LOG_LEVELS
|
29
|
+
|
30
|
+
# Module-wide logger shared across the SDK; emits to stdout.
logger: logging.Logger = logging.getLogger("peak-sdk")
stream_handler = logging.StreamHandler(sys.stdout)
# NOTE(review): setFormatter expects a logging.Formatter instance — presumably
# peak.constants.LOG_FORMAT is one despite its string-like name; confirm.
stream_handler.setFormatter(LOG_FORMAT)
logger.addHandler(stream_handler)
# Do not bubble records up to the root logger (avoids duplicate output).
logger.propagate = False
|
35
|
+
|
36
|
+
|
37
|
+
def set_log_level(log_level: LOG_LEVELS) -> None:
    """Update log level for the Peak logger and each of its handlers.

    Args:
        log_level (LOG_LEVELS): new logging level for the logger.
    """
    # Apply the level to the logger itself and every attached handler in one pass.
    for target in (logger, *logger.handlers):
        target.setLevel(log_level)
|
46
|
+
|
47
|
+
|
48
|
+
# Public API of this module.
__all__: List[str] = ["logger"]
|
peak/press/__init__.py
ADDED
@@ -0,0 +1,28 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""This module exports all press services from the `Peak-Platform`."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
from typing import List
|
25
|
+
|
26
|
+
from peak.press import apps, blocks, deployments, specs
|
27
|
+
|
28
|
+
# Public API of this package: the press service submodules.
__all__: List[str] = ["apps", "blocks", "deployments", "specs"]
|