peak-sdk 1.7.0__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- peak/_metadata.py +57 -57
- peak/_version.py +1 -1
- peak/callbacks.py +22 -2
- peak/cli/args.py +18 -0
- peak/cli/helpers.py +12 -8
- peak/cli/press/apps/deployments.py +72 -18
- peak/cli/press/apps/specs.py +27 -11
- peak/cli/press/blocks/deployments.py +71 -18
- peak/cli/press/blocks/specs.py +29 -13
- peak/cli/press/deployments.py +2 -4
- peak/cli/press/specs.py +2 -2
- peak/cli/resources/alerts/emails.py +4 -5
- peak/cli/resources/artifacts.py +9 -9
- peak/cli/resources/images.py +11 -12
- peak/cli/resources/services.py +6 -7
- peak/cli/resources/tenants.py +53 -4
- peak/cli/resources/users.py +3 -3
- peak/cli/resources/webapps.py +6 -6
- peak/cli/resources/workflows.py +12 -13
- peak/compression.py +28 -13
- peak/exceptions.py +15 -1
- peak/handler.py +5 -1
- peak/helpers.py +38 -0
- peak/output.py +13 -6
- peak/press/apps.py +33 -0
- peak/press/blocks.py +63 -0
- peak/press/deployments.py +0 -3
- peak/resources/tenants.py +37 -2
- peak/template.py +21 -2
- {peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/METADATA +5 -5
- {peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/RECORD +34 -34
- {peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/LICENSE +0 -0
- {peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/WHEEL +0 -0
- {peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/entry_points.txt +0 -0
peak/cli/resources/workflows.py
CHANGED
@@ -93,7 +93,7 @@ _COUNT = typer.Option(
 )
 
 
-@app.command(short_help="Create a new workflow."
+@app.command(short_help="Create a new workflow.")
 def create(
     ctx: typer.Context,
     file: str = args.TEMPLATE_PATH,
@@ -216,7 +216,7 @@ def create(
     writer.write(response)
 
 
-@app.command(short_help="Update an existing workflow."
+@app.command(short_help="Update an existing workflow.")
 def update(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -342,7 +342,6 @@ def update(
 
 @app.command(
     short_help="Create a new workflow or Update an existing workflow.",
-    options_metavar="create_or_update_workflow",
 )
 def create_or_update(
     ctx: typer.Context,
@@ -464,7 +463,7 @@ def create_or_update(
     writer.write(response)
 
 
-@app.command(short_help="Update required fields of an existing workflow."
+@app.command(short_help="Update required fields of an existing workflow.")
 def patch(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -640,7 +639,7 @@ def patch(
     writer.write(response)
 
 
-@app.command("list", short_help="List workflows."
+@app.command("list", short_help="List workflows.")
 def list_workflows(
     ctx: typer.Context,
     page_size: Optional[int] = args.PAGE_SIZE,
@@ -690,7 +689,7 @@ def list_workflows(
     writer.write(response)
 
 
-@app.command(short_help="Describe details of a workflow."
+@app.command(short_help="Describe details of a workflow.")
 def describe(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -734,7 +733,7 @@ def describe(
     writer.write(response)
 
 
-@app.command(short_help="Delete a workflow."
+@app.command(short_help="Delete a workflow.")
 def delete(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -766,7 +765,7 @@ def delete(
     writer.write(response)
 
 
-@app.command(short_help="Start a workflow run."
+@app.command(short_help="Start a workflow run.")
 def execute(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -828,7 +827,7 @@ def execute(
     writer.write(response)
 
 
-@app.command(short_help="List all available resources."
+@app.command(short_help="List all available resources.")
 def list_resources(
     ctx: typer.Context,
     paging: Optional[bool] = PAGING,  # noqa: ARG001
@@ -866,7 +865,7 @@ def list_resources(
     writer.write(response)
 
 
-@app.command(short_help="List default resources."
+@app.command(short_help="List default resources.")
 def list_default_resources(
     ctx: typer.Context,
     paging: Optional[bool] = PAGING,  # noqa: ARG001
@@ -899,7 +898,7 @@ def list_default_resources(
     writer.write(response)
 
 
-@app.command(short_help="List executions for the given workflow."
+@app.command(short_help="List executions for the given workflow.")
 def list_executions(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID,
@@ -951,7 +950,7 @@ def list_executions(
     writer.write(response)
 
 
-@app.command(short_help="Get workflow execution logs."
+@app.command(short_help="Get workflow execution logs.")
 def get_execution_logs(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID_OPTION,
@@ -1039,7 +1038,7 @@ def get_execution_logs(
     next_token = response.get("nextToken", None)
 
 
-@app.command(short_help="Get workflow execution details."
+@app.command(short_help="Get workflow execution details.")
 def get_execution_details(
     ctx: typer.Context,
     workflow_id: int = _WORKFLOW_ID_OPTION,
peak/compression.py
CHANGED
@@ -19,6 +19,7 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Compression module to create zip file to be used as artifact."""
+
 from __future__ import annotations
 
 import contextlib
@@ -26,7 +27,7 @@ import os
 import tempfile
 import zipfile
 from pathlib import Path
-from typing import Iterable, Iterator, Optional, Set
+from typing import Any, Dict, Iterable, Iterator, Optional, Set
 
 from pathspec import PathSpec
 
@@ -163,7 +164,7 @@ def _load_ignore_patterns(path_obj: Path, ignore_files: Optional[list[str]]) ->
 
 
 def print_file_tree(files: Iterable[str]) -> None:
-    """Prints list of files in tree format.
+    """Prints list of files in tree format with specific limits per level.
 
     Args:
         files (list[str]): List of file paths
@@ -171,33 +172,47 @@ def print_file_tree(files: Iterable[str]) -> None:
     writer = output.Writer(ignore_debug_mode=True)
     files_dict = _build_files_dict(files)
 
-
+    limits = {1: 100, 2: 100, 3: 50}
+    default_limit = 25
+
+    def _print_tree(files_dict: Dict[str, Any], indent: str, level: int) -> None:
+        limit = limits.get(level, default_limit)
+        count = 0
+
+        for key, value in files_dict.items():
+            if isinstance(value, dict):
+                writer.write(f"{indent}{key}/")
+                new_indent = indent + ("| " if indent else "├── ")
+                _print_tree(value, new_indent, level + 1)
+
         for key, value in files_dict.items():
-
-
-
-
+            if not isinstance(value, dict):
+                if count >= limit:
+                    writer.write(f"{indent}...")
+                    break
+                writer.write(f"{indent}{key}")
+                count += 1
 
-    _print_tree(files_dict, "")
+    _print_tree(files_dict, "", 1)
 
 
-def _build_files_dict(files: Iterable[str]) ->
+def _build_files_dict(files: Iterable[str]) -> Dict[str, Any]:
     """Builds a nested dictionary from list of files.
 
     Args:
         files (list[str]): List of file paths to process.
 
     Returns:
-        dict[str,
+        dict[str, Any]: Nested dict file tree structure.
     """
-    files_dict:
+    files_dict: Dict[str, Any] = {}
     for f in files:
         components = Path(os.path.normpath(f)).parts
         current_dir = files_dict
         for directory in components[:-1]:
-            if directory not in current_dir:
+            if directory not in current_dir or current_dir[directory] is None:
                 current_dir[directory] = {}
             current_dir = current_dir[directory]
         if components[-1] not in current_dir:
-            current_dir[components[-1]] =
+            current_dir[components[-1]] = None
     return files_dict
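For context on what the reworked `print_file_tree` walks over: `_build_files_dict` turns the flat file list into a nested dict in which directories map to sub-dicts and files map to `None` leaves, and the per-level caps (`limits = {1: 100, 2: 100, 3: 50}`, 25 elsewhere) bound how many files get printed at each depth. A minimal standalone sketch of the dict-building step, mirroring the hunk above rather than calling the SDK's private helper:

    from pathlib import Path
    from typing import Any, Dict, Iterable

    def build_files_dict(files: Iterable[str]) -> Dict[str, Any]:
        """Mirror of _build_files_dict above: directories become nested dicts, files become None leaves."""
        tree: Dict[str, Any] = {}
        for f in files:
            parts = Path(f).parts
            node = tree
            for directory in parts[:-1]:
                if directory not in node or node[directory] is None:
                    node[directory] = {}
                node = node[directory]
            node.setdefault(parts[-1], None)
        return tree

    print(build_files_dict(["src/app.py", "src/utils/io.py", "README.md"]))
    # {'src': {'app.py': None, 'utils': {'io.py': None}}, 'README.md': None}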
peak/exceptions.py
CHANGED
@@ -19,6 +19,7 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Exceptions for the Peak API."""
+
 from __future__ import annotations
 
 from collections import defaultdict
@@ -103,6 +104,17 @@ class PayloadTooLargeException(BaseHttpException):
 
     STATUS_CODE = 413
 
+    def __init__(self, message: str = "") -> None:
+        """Throw exception with custom message.
+
+        Args:
+            message (str): Additional message to add to exception.
+        """
+        error_message: str = (
+            "Please use '--dry-run' with your command to preview the file tree and optimize the file contents."
+        )
+        super().__init__(f"{message}. {error_message}")
+
 
 class UnprocessableEntityException(BaseHttpException):
     """The server understands the request, but it was unable to process the contained instructions."""
@@ -154,7 +166,9 @@ class FileLimitExceededException(PeakBaseException):
             message (str): Additional message to add to exception.
             units (str): Units of the maximum size.
         """
-        error_message: str =
+        error_message: str = (
+            f"Compressed directory size is over {max_size}{units}. Please use '--dry-run' with your command to preview the file tree and optimize the file contents."
+        )
        super().__init__(f"{error_message} {message}")
 
 
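The new `PayloadTooLargeException.__init__` appends a fixed `--dry-run` hint to whatever message it is given, and the handler change in the next file raises it for HTTP 413 responses using the server's `detail` field. A quick sketch of the resulting message, assuming the exception stringifies to its message as standard Python exceptions do:

    from peak.exceptions import PayloadTooLargeException

    try:
        raise PayloadTooLargeException("Artifact exceeds the allowed size")
    except PayloadTooLargeException as exc:
        # Prints the original message followed by the fixed hint:
        # "Artifact exceeds the allowed size. Please use '--dry-run' with your
        # command to preview the file tree and optimize the file contents."
        print(exc)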
peak/handler.py
CHANGED
@@ -19,6 +19,7 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Handler for sending requests to the API."""
+
 from __future__ import annotations
 
 import contextlib
@@ -35,7 +36,7 @@ from urllib3.util import Retry
 import peak.config
 from peak.compression import compress, get_files_to_include, print_file_tree
 from peak.constants import ContentType, HttpMethods
-from peak.exceptions import BaseHttpException
+from peak.exceptions import BaseHttpException, PayloadTooLargeException
 from peak.output import Writer
 from peak.telemetry import telemetry
 from peak.validators import check_file_size
@@ -200,6 +201,9 @@ class HandlerUtils(AuthRetrySession):
         if 200 <= response.status_code < 300:  # noqa: PLR2004
             return response
 
+        if response.status_code == 413:  # noqa: PLR2004
+            raise PayloadTooLargeException(response.json().get("detail", ""))
+
         raise BaseHttpException.REGISTRY[response.status_code](response.json())
 
 
peak/helpers.py
CHANGED
@@ -20,10 +20,12 @@
 #
 
 """Collection of basic helper functions."""
+
 from __future__ import annotations
 
 import inspect
 import json
+import re
 from datetime import datetime, timezone
 from types import FrameType
 from typing import Any, Dict, List, Optional
@@ -205,6 +207,42 @@ def remove_none_values(data: Any) -> Dict[str, Any]:
     return data  # type: ignore[no-any-return]
 
 
+def convert_to_snake_case(s: str) -> str:
+    """Converts a given string from camelCase or TitleCase to snake case.
+
+    Args:
+        s (str): The string to be converted.
+
+    Returns:
+        str: The converted string in snake case.
+    """
+    return re.sub(r"(?<!^)(?=[A-Z])", "_", s).lower()
+
+
+def convert_keys_to_snake_case(data: Dict[str, Any], *, convert_nested: bool = False) -> Dict[str, Any]:
+    """Converts keys of a dictionary to snake case.
+
+    Args:
+        data (Dict[str, Any]): Dictionary to be converted.
+        convert_nested (bool): Whether to convert nested keys as well. Default is False.
+
+    Returns:
+        Dict[str, Any]: Dictionary with keys converted to snake case.
+    """
+
+    def convert_dict(d: Dict[str, Any]) -> Dict[str, Any]:
+        new_dict = {}
+        for k, v in d.items():
+            new_key = convert_to_snake_case(k)
+            if convert_nested and isinstance(v, dict):
+                new_dict[new_key] = convert_dict(v)
+            else:
+                new_dict[new_key] = v
+        return new_dict
+
+    return convert_dict(data)
+
+
 def format_date(timestamp: str, time_format: str = "%Y/%m/%d %H:%M:%S") -> str:
     """Format a timestamp to a given format.
 
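The two new helpers are plain functions in `peak.helpers`; a short illustration with made-up keys (nested keys are only converted when `convert_nested=True`):

    from peak.helpers import convert_keys_to_snake_case, convert_to_snake_case

    print(convert_to_snake_case("imageDetails"))  # image_details

    payload = {"workflowId": 123, "imageDetails": {"buildStatus": "completed"}}
    print(convert_keys_to_snake_case(payload))
    # {'workflow_id': 123, 'image_details': {'buildStatus': 'completed'}}
    print(convert_keys_to_snake_case(payload, convert_nested=True))
    # {'workflow_id': 123, 'image_details': {'build_status': 'completed'}}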
peak/output.py
CHANGED
@@ -56,6 +56,7 @@ class Writer:
         self,
         data: Any,
         deprecation_message: str | None = None,
+        output_type: OutputTypes | None = None,
     ) -> None:
         """Write logs to the terminal.
 
@@ -63,27 +64,33 @@ class Writer:
             data (Any): Data to be printed on the terminal.
                 This handles dry-run, debug mode and exit code for the CLI.
             deprecation_message (str, optional): Deprecation message to be printed on the terminal.
+            output_type (OutputTypes, optional): Override for the output type set in the config.
         """
-
+        output_type_parsed = output_type or config.OUTPUT_TYPE
         table_params = config.TABLE_PARAMS
 
         if not config.DEBUG_MODE or self.ignore_debug_mode:
             if deprecation_message:
                 self._print_deprecation_warning(deprecation_message)
-            if
+            if output_type_parsed == OutputTypes.yaml.value:
                 self.__yaml(data)
-            elif
+            elif output_type_parsed == OutputTypes.table.value:
                 self.__table(data, table_params)
             else:
-                self.__json(data)
+                self.__json(data, output_type)
 
-    def __json(self, data: Any) -> None:
+    def __json(self, data: Any, output_type: OutputTypes | None = None) -> None:
         """Write logs to the terminal in JSON format.
 
         Args:
             data (Any): Data to be printed on the terminal.
+            output_type (OutputTypes): If passed, JSON parser would be used to print output
+                even if the data is not a dictionary.
         """
-
+        if isinstance(data, dict) or output_type == OutputTypes.json:
+            console.print_json(data=data)
+        else:
+            console.print(data)
 
     def __table(self, data: dict[Any, Any], params: dict[str, Any]) -> None:
         """Write logs to the terminal in a tabular format.
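The `output_type` argument lets a caller force a renderer regardless of the configured output type, and passing `OutputTypes.json` also makes `__json` use the JSON printer for non-dict data. A hedged sketch; the `OutputTypes` import path is an assumption, since the diff references the enum but not its module:

    from peak.constants import OutputTypes  # assumed location of the OutputTypes enum
    from peak.output import Writer

    writer = Writer()
    # Force JSON rendering even though the data is a list, not a dict.
    writer.write(["wf-1", "wf-2"], output_type=OutputTypes.json)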
peak/press/apps.py
CHANGED
@@ -709,6 +709,39 @@ class App(BaseClient):
             subdomain="press",
         )
 
+    def redeploy(self, deployment_id: str) -> Dict[str, Any]:
+        """Redeploy latest revision of an existing App deployment.
+
+        This function allows you to redeploy an App deployment that is in a `failed` or `warning` state, provided at least one of its block deployments is also in a `failed` or `warning` state.
+
+        REFERENCE:
+            🔗 `API Documentation <https://press.peak.ai/api-docs/index.htm#/App%20Deployments/post_v1_apps_deployments__deploymentId__redeploy>`__
+
+        Args:
+            deployment_id (str): The ID of the App deployment to redeploy.
+
+        Returns:
+            Dict[str, Any]: A dictionary containing details of the deployment.
+
+        Raises:
+            BadRequestException: The given parameters are invalid.
+            UnauthorizedException: The credentials are invalid.
+            ForbiddenException: The user does not have permission to perform the operation.
+            NotFoundException: The given App deployment or its revisions do not exist.
+            ConflictException: There is a conflict with the current state of the target resource.
+            UnprocessableEntityException: The server was unable to process the request.
+            InternalServerErrorException: the server encountered an unexpected condition that
+                prevented it from fulfilling the request.
+        """
+        method, endpoint = HttpMethods.POST, f"{self.DEPLOYMENTS_BASE_ENDPOINT}/{deployment_id}/redeploy"
+
+        return self.session.create_request(  # type: ignore[no-any-return]
+            endpoint,
+            method,
+            content_type=ContentType.APPLICATION_JSON,
+            subdomain="press",
+        )
+
     def delete_deployment(self, deployment_id: str) -> Dict[None, None]:
         """Deletes an App deployment.
 
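Calling the new method is a one-liner; a sketch assuming `peak.press.apps` exposes a `get_client` helper like the other clients in this diff, with a placeholder deployment ID:

    from peak.press import apps

    client = apps.get_client()  # assumed helper, mirroring peak.resources.tenants.get_client
    # Redeploy the latest revision of a failed/warning App deployment.
    details = client.redeploy("<app-deployment-id>")
    print(details)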
peak/press/blocks.py
CHANGED
@@ -397,6 +397,8 @@ class Block(BaseClient):
             }
 
         SCHEMA(Parameters):
+            The valid types for parameters are `boolean`, `string`, `string_array`, `number`, `number_array`, `object` and `object_array`.
+
             .. code-block:: json
 
                 { "build": [
@@ -790,6 +792,35 @@ class Block(BaseClient):
                 }
             }
 
+        SCHEMA(Parameters):
+            The valid types for parameters are `boolean`, `string`, `string_array`, `number`, `number_array`, `object` and `object_array`.
+
+            .. code-block:: json
+
+                { "build": [
+                    {
+                        "defaultValue": "string(required)",
+                        "description": "string",
+                        "hideValue": "boolean",
+                        "name": "string(required)",
+                        "required": "boolean(required)",
+                        "title": "string",
+                        "type": "string(required)",
+                    }
+                    ],
+                    "run": [
+                    {
+                        "defaultValue": "string(required)",
+                        "description": "string",
+                        "hideValue": "boolean",
+                        "name": "string(required)",
+                        "required": "boolean(required)",
+                        "title": "string",
+                        "type": "string(required)",
+                    }
+                    ]
+                }
+
         Raises:
             BadRequestException: The given parameters are invalid.
             UnauthorizedException: The credentials are invalid.
@@ -1187,6 +1218,38 @@ class Block(BaseClient):
             subdomain="press",
         )
 
+    def redeploy(self, deployment_id: str) -> Dict[str, Any]:
+        """Redeploy latest revision of an existing Block deployment.
+
+        This function allows you to redeploy a Block deployment that is in `failed` or `warning` state.
+
+        REFERENCE:
+            🔗 `API Documentation <https://press.peak.ai/api-docs/index.htm#/Block%20Deployments/post_v1_blocks_deployments__deploymentId__redeploy>`__
+
+        Args:
+            deployment_id (str): The ID of the Block deployment to redeploy.
+
+        Returns:
+            Dict[str, Any]: A dictionary containing details of the deployment.
+
+        Raises:
+            BadRequestException: The given parameters are invalid.
+            ConflictException: There is a conflict with the current state of the target resource.
+            ForbiddenException: The user does not have permission to perform the operation.
+            InternalServerErrorException: The server encountered an unexpected condition that
+                prevented it from fulfilling the request.
+            NotFoundException: The given block deployment or its revisions do not exist.
+            UnauthorizedException: The credentials are invalid.
+        """
+        method, endpoint = HttpMethods.POST, f"{self.DEPLOYMENTS_BASE_ENDPOINT}/{deployment_id}/redeploy"
+
+        return self.session.create_request(  # type: ignore[no-any-return]
+            endpoint,
+            method,
+            content_type=ContentType.APPLICATION_JSON,
+            subdomain="press",
+        )
+
     def delete_deployment(self, deployment_id: str) -> Dict[None, None]:
         """Deletes the Block deployment.
 
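Block deployments gain the same `redeploy` call, and the spec docstrings now document the parameters schema. An illustrative `parameters` section shaped like the documented SCHEMA(Parameters); names and values are invented for the example:

    parameters = {
        "build": [
            {
                "defaultValue": "3.9",  # illustrative value
                "description": "Python version used to build the image",
                "hideValue": False,
                "name": "pythonVersion",
                "required": True,
                "title": "Python version",
                "type": "string",  # one of the documented valid types
            },
        ],
        "run": [
            {
                "defaultValue": "daily",
                "name": "schedule",
                "required": True,
                "type": "string",
            },
        ],
    }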
peak/press/deployments.py
CHANGED
@@ -132,9 +132,6 @@ class Deployment(BaseClient):
     def execute_resources(self, deployment_id: str) -> Dict[str, Any]:
         """Trigger the resources of an app or block deployment. This will execute the resources of the latest revision of the deployment.
 
-        REFERENCE:
-            🔗 `API Documentation <https://press.peak.ai/api-docs/index.htm#/Deployments/post_v1_deployments__deploymentId__trigger>`__
-
         Args:
             deployment_id (str): ID of the app or block deployment to trigger resources for.
 
peak/resources/tenants.py
CHANGED
@@ -31,7 +31,8 @@ from peak.session import Session
 class Tenant(BaseClient):
     """Tenant client class."""
 
-
+    QUOTA_BASE_ENDPOINT = "quota/api/v1"
+    CONNECTIONS_BASE_ENDPOINT = "connections/api/v1"
 
     def list_instance_options(
         self,
@@ -56,7 +57,7 @@ class Tenant(BaseClient):
             NotFoundException: The given image does not exist.
             InternalServerErrorException: The server failed to process the request.
         """
-        method, endpoint = HttpMethods.GET, f"{self.
+        method, endpoint = HttpMethods.GET, f"{self.QUOTA_BASE_ENDPOINT}/settings/tenant-instance-options"
         params = {"entityType": entity_type}
 
         return self.session.create_request(  # type: ignore[no-any-return]
@@ -66,6 +67,40 @@ class Tenant(BaseClient):
             params=params,
         )
 
+    def get_credentials(
+        self,
+        data_store_type: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Retrieve credentials for a given data store type.
+
+        REFERENCE:
+            🔗 `API Documentation <https://service.peak.ai/connections/api-docs/index.htm#/connections/get_api_v1_connections_credentials>`__
+
+        Args:
+            data_store_type (str): The type of the data store.
+                Allowed values are - data-warehouse.
+                Default - data-warehouse
+
+        Returns:
+            Dict[str, Any]: a dictionary containing the credentials for the data store.
+
+        Raises:
+            BadRequestException: The given request parameters are invalid.
+            UnauthorizedException: The credentials are invalid.
+            ForbiddenException: The user does not have permission to perform the operation.
+            NotFoundException: The given image does not exist.
+            InternalServerErrorException: The server failed to process the request.
+        """
+        method, endpoint = HttpMethods.GET, f"{self.CONNECTIONS_BASE_ENDPOINT}/connections/credentials"
+        params = {"type": data_store_type}
+
+        return self.session.create_request(  # type: ignore[no-any-return]
+            endpoint,
+            method,
+            content_type=ContentType.APPLICATION_JSON,
+            params=params,
+        )
+
 
 def get_client(session: Optional[Session] = None) -> Tenant:
     """Returns a Tenant client, If no session is provided, a default session is used.
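`get_client` is defined in this module, so fetching data-store credentials looks like the following; per the docstring, `data-warehouse` is both the only allowed and the default value:

    from peak.resources import tenants

    client = tenants.get_client()
    credentials = client.get_credentials("data-warehouse")
    print(credentials)  # dictionary of credentials for the data store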
peak/template.py
CHANGED
@@ -19,6 +19,7 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Template module which handles all things related to templates."""
+
 from __future__ import annotations
 
 import os
@@ -32,7 +33,7 @@ from jinja2 import Environment
 from jinja2.ext import Extension
 
 from peak import exceptions
-from peak.helpers import remove_none_values
+from peak.helpers import convert_keys_to_snake_case, remove_none_values
 
 
 def _parse_jinja_template(template_path: Path, params: Dict[str, Any]) -> str:
@@ -53,6 +54,9 @@ def load_template(
     file: Union[Path, str],
     params: Optional[Dict[str, Any]] = None,
     description: Optional[str] = None,
+    markdown_data: Optional[Dict[str, str]] = None,
+    *,
+    convert_to_snake_case: Optional[bool] = False,
 ) -> Dict[str, Any]:
     """Load a template file through `Jinja` into a dictionary.
 
@@ -66,7 +70,10 @@ def load_template(
     Args:
         file (Union[Path, str]): Path to the templated `YAML` file to be loaded.
         params (Dict[str, Any] | None, optional): Named parameters to be passed to Jinja. Defaults to `{}`.
-        description (
+        description (str, optional): Description of press entities in markdown format. Defaults to `None`. Note that this parameter will soon be deprecated. Use `markdown_data` instead.
+        markdown_data (Dict[str, str] | None, optional): Dictionary containing the markdown data to be inserted into the template.
+            The key is a colon-separated string representing the nested key path (e.g., "body:metadata:description"), and the value is the markdown content.
+        convert_to_snake_case (Optional[bool], optional): Convert the keys of the dictionary to snake_case. Defaults to `False`.
 
     Returns:
         Dict[str, Any]: Dictionary containing the rendered YAML file
@@ -75,9 +82,21 @@ def load_template(
     file = Path(file)
     template: str = _parse_jinja_template(file, params)
     parsed_data: Dict[str, Any] = yaml.safe_load(template)
+
+    if convert_to_snake_case:
+        parsed_data = convert_keys_to_snake_case(parsed_data)
+
     if description:
         parsed_data["body"]["metadata"]["description"] = description
 
+    if markdown_data:
+        for key, markdown_content in markdown_data.items():
+            keys = key.split(":")
+            target_dict = parsed_data
+            for k in keys[:-1]:
+                target_dict = target_dict.setdefault(k, {})
+            target_dict[keys[-1]] = markdown_content
+
     return remove_none_values(parsed_data)
 
 
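A sketch of the new `load_template` parameters, using a hypothetical template file and Jinja parameters: `markdown_data` keys are colon-separated paths into the parsed YAML, and `convert_to_snake_case` is the new keyword-only flag.

    from peak.template import load_template

    body = load_template(
        "workflow.yaml",                    # hypothetical template file
        params={"image_name": "my-image"},  # hypothetical Jinja parameters
        markdown_data={
            # Inserted at parsed_data["body"]["metadata"]["description"].
            "body:metadata:description": "# My workflow\nCreated from a template.",
        },
        convert_to_snake_case=True,
    )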
{peak_sdk-1.7.0.dist-info → peak_sdk-1.9.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: peak-sdk
-Version: 1.
+Version: 1.9.0
 Summary: Python SDK for interacting with the Peak platform
 Home-page: https://docs.peak.ai/sdk/latest/
 License: Apache-2.0
@@ -21,14 +21,14 @@ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Typing :: Typed
 Requires-Dist: certifi (>=2024.2.2)
 Requires-Dist: jinja2 (>=3.1,<4.0)
-Requires-Dist: orjson (>=3.
+Requires-Dist: orjson (>=3.10,<4.0)
 Requires-Dist: pathspec
 Requires-Dist: pyyaml (>=6.0,<7.0)
-Requires-Dist: requests (>=2.
+Requires-Dist: requests (>=2.32,<3.0)
 Requires-Dist: requests-toolbelt (>=1.0,<2.0)
 Requires-Dist: shellingham (<1.5.4)
-Requires-Dist: structlog (>=24.
-Requires-Dist: typer
+Requires-Dist: structlog (>=24.2.0,<25.0.0)
+Requires-Dist: typer (>=0.12.3,<0.13.0)
 Requires-Dist: urllib3 (<2)
 Project-URL: Documentation, https://docs.peak.ai/sdk/latest/
 Description-Content-Type: text/markdown