peak-sdk 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- peak/__init__.py +36 -0
- peak/_version.py +21 -0
- peak/auth.py +22 -0
- peak/base_client.py +52 -0
- peak/cli/__init__.py +20 -0
- peak/cli/args.py +84 -0
- peak/cli/cli.py +56 -0
- peak/cli/helpers.py +187 -0
- peak/cli/press/__init__.py +21 -0
- peak/cli/press/apps/__init__.py +40 -0
- peak/cli/press/apps/deployments.py +238 -0
- peak/cli/press/apps/specs.py +387 -0
- peak/cli/press/blocks/__init__.py +40 -0
- peak/cli/press/blocks/deployments.py +240 -0
- peak/cli/press/blocks/specs.py +492 -0
- peak/cli/press/deployments.py +78 -0
- peak/cli/press/specs.py +131 -0
- peak/cli/resources/__init__.py +21 -0
- peak/cli/resources/artifacts.py +310 -0
- peak/cli/resources/images.py +886 -0
- peak/cli/resources/webapps.py +356 -0
- peak/cli/resources/workflows.py +703 -0
- peak/cli/ruff.toml +11 -0
- peak/cli/version.py +49 -0
- peak/compression.py +162 -0
- peak/config.py +24 -0
- peak/constants.py +105 -0
- peak/exceptions.py +217 -0
- peak/handler.py +358 -0
- peak/helpers.py +184 -0
- peak/logger.py +48 -0
- peak/press/__init__.py +28 -0
- peak/press/apps.py +669 -0
- peak/press/blocks.py +707 -0
- peak/press/deployments.py +145 -0
- peak/press/specs.py +260 -0
- peak/py.typed +0 -0
- peak/resources/__init__.py +28 -0
- peak/resources/artifacts.py +343 -0
- peak/resources/images.py +675 -0
- peak/resources/webapps.py +278 -0
- peak/resources/workflows.py +625 -0
- peak/session.py +259 -0
- peak/telemetry.py +201 -0
- peak/template.py +231 -0
- peak/validators.py +48 -0
- peak_sdk-1.0.0.dist-info/LICENSE +201 -0
- peak_sdk-1.0.0.dist-info/METADATA +199 -0
- peak_sdk-1.0.0.dist-info/RECORD +51 -0
- peak_sdk-1.0.0.dist-info/WHEEL +4 -0
- peak_sdk-1.0.0.dist-info/entry_points.txt +3 -0
peak/cli/ruff.toml
ADDED
peak/cli/version.py
ADDED
@@ -0,0 +1,49 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Version command for peak-cli."""
|
22
|
+
import platform
|
23
|
+
import sys
|
24
|
+
|
25
|
+
import typer
|
26
|
+
from peak import __version__
|
27
|
+
from rich.console import Console
|
28
|
+
|
29
|
+
# Rich console used to print the version banner in display_version.
console = Console()
|
30
|
+
|
31
|
+
|
32
|
+
def display_version(print_version: bool) -> None:
    """Display version of this tool.

    Args:
        print_version (bool): Print version and exit

    Raises:
        Exit: Exit the program
    """
    # Guard clause: nothing to do unless the version flag was passed.
    if not print_version:
        return
    info = sys.version_info
    python_version = f"Python=={info.major}.{info.minor}.{info.micro}"
    platform_version = f"System=={platform.system()}({platform.release()})"
    console.print(f"peak-cli=={__version__}\n{python_version}\n{platform_version}")
    raise typer.Exit()
peak/compression.py
ADDED
@@ -0,0 +1,162 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Compression module to create zip file to be used as artifact."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
import contextlib
|
25
|
+
import os
|
26
|
+
import tempfile
|
27
|
+
import zipfile
|
28
|
+
from pathlib import Path
|
29
|
+
from typing import Iterator, Optional, Set
|
30
|
+
|
31
|
+
from pathspec import PathSpec
|
32
|
+
|
33
|
+
from peak import constants, exceptions
|
34
|
+
|
35
|
+
ZIP_COMPRESSION = zipfile.ZIP_DEFLATED
|
36
|
+
|
37
|
+
|
38
|
+
@contextlib.contextmanager
def compress(path: str, ignore_files: Optional[list[str]] = None) -> Iterator[tempfile.SpooledTemporaryFile[bytes]]:
    """Build a compressed zip archive of the files under ``path`` and yield it.

    Args:
        path (str): Path of the folder to create zip file.
        ignore_files (Optional[list[str]]): Ignorefiles to use when creating zip file.

    Yields:
        Iterator[tempfile.SpooledTemporaryFile[bytes]]: Open temp file positioned
            at the start of the finished archive.

    Raises:
        InvalidPathException: Given path is invalid and cannot be traversed.
        FileLimitExceededException: Compressed directory size exceeded the max limits.
    """
    root = Path(path)
    if not root.is_dir():
        raise exceptions.InvalidPathException(path, "Either the path does not exist or is not a directory.")

    seen_dirs: Set[Path] = set()
    size_cap = constants.MAX_ARTIFACT_SIZE_MB * constants.MB

    with tempfile.SpooledTemporaryFile() as archive:
        with zipfile.ZipFile(archive, "w", compression=ZIP_COMPRESSION) as zf:
            for rel_name in get_files_to_include(path, ignore_files):
                zf.write(root / rel_name, rel_name)
                seen_dirs.update(Path(rel_name).parents)
                # Check after every write so we fail as soon as the cap is exceeded.
                if archive.tell() > size_cap:
                    raise exceptions.FileLimitExceededException(constants.MAX_ARTIFACT_SIZE_MB)

            # Directories must be present as explicit entries for the API backend;
            # "." is the archive root and must not be written.
            seen_dirs.discard(Path("."))
            for sub_dir in seen_dirs:
                zf.write(root / sub_dir, sub_dir)

        archive.seek(0)
        yield archive
76
|
+
|
77
|
+
|
78
|
+
def print_zip_content(zip_file: tempfile.SpooledTemporaryFile[bytes]) -> None:
    """Write the archive's table of contents to stdout.

    Args:
        zip_file (BinaryIO): Opened zip file in binary format.
    """
    archive = zipfile.ZipFile(zip_file, "r")
    with archive:
        archive.printdir()
86
|
+
|
87
|
+
|
88
|
+
def get_files_to_include(path: str, ignore_files: Optional[list[str]] = None) -> Iterator[str]:
    """Yield relative paths of the files that should be included.

    Args:
        path (str): Root path from where to start the traversal.
        ignore_files (Optional[list[str]]): Path of ignorefiles to use, defaults to .dockerignore.

    Yields:
        Iterator[str]: Path of files to be included.
    """
    spec = _load_ignore_patterns(Path(path), ignore_files)
    # match_tree walks the tree and yields only paths matching the inclusion spec.
    yield from spec.match_tree(path, follow_links=True)
101
|
+
|
102
|
+
|
103
|
+
def _reverse_pattern(pattern: str) -> str:
|
104
|
+
"""Converts the inclusion pattern to exclusion and vice-versa.
|
105
|
+
|
106
|
+
This is needed because PathSpec will return the files that matches the patterns
|
107
|
+
but we want the opposite, i.e. we want to exclude the files which match the
|
108
|
+
patterns in ignorefiles.
|
109
|
+
|
110
|
+
Args:
|
111
|
+
pattern (str): Pattern to process
|
112
|
+
|
113
|
+
Returns:
|
114
|
+
str: Processed pattern
|
115
|
+
"""
|
116
|
+
pattern = pattern.strip()
|
117
|
+
if not pattern or pattern.startswith("#"):
|
118
|
+
return pattern
|
119
|
+
if pattern.startswith("!"):
|
120
|
+
return pattern[1:]
|
121
|
+
return "!" + pattern
|
122
|
+
|
123
|
+
|
124
|
+
def _load_ignore_patterns(path_obj: Path, ignore_files: Optional[list[str]]) -> PathSpec:
    """Reads ignorefiles and loads all patterns into PathSpec.

    Args:
        path_obj (Path): Root path relative to which ignore_files would be searched.
        ignore_files (list[str]): Path of ignorefiles relative to where the script is running

    Returns:
        PathSpec: PathSpec object with all patterns

    Raises:
        InvalidPathException: Given path is invalid and cannot be traversed.
    """
    if not ignore_files:
        # use .dockerignore as default if no ignore file is provided
        default_ignore = path_obj / ".dockerignore"
        ignore_files = [os.fspath(default_ignore)] if default_ignore.exists() else []

    # Include everything first; ignore patterns are applied on top of it.
    # If "*" were missing, nothing would be included at all.
    all_patterns: list[str] = ["*"]
    for ignore_file in ignore_files:
        ignore_file_path = path_obj / ignore_file
        normalized_ignore_path = ignore_file_path.resolve().relative_to(path_obj.resolve())
        # ignorefiles should only be at root level
        if len(normalized_ignore_path.parents) != 1:
            raise exceptions.InvalidPathException(
                ignore_file,
                "Ignore file should be present at root level of given path.",
            )

        # read_text() closes the handle; the previous open("r").read() leaked it.
        patterns = ignore_file_path.read_text().splitlines()
        all_patterns.extend(map(_reverse_pattern, patterns))
    return PathSpec.from_lines("gitwildmatch", all_patterns)
peak/config.py
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Basic Configuration details for the SDK/CLI."""
|
22
|
+
from peak.constants import Sources
|
23
|
+
|
24
|
+
# Source reported in telemetry calls; defaults to SDK (the CLI presumably
# switches this to Sources.CLI at startup — confirm against peak/cli).
SOURCE = Sources.SDK
|
peak/constants.py
ADDED
@@ -0,0 +1,105 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Contains constants which are used across SDK modules."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
from enum import Enum, auto
|
25
|
+
from logging import Formatter
|
26
|
+
from typing import Any, List, Literal, TypedDict
|
27
|
+
|
28
|
+
MB = 2**20  # bytes in one mebibyte; used with MAX_ARTIFACT_SIZE_MB for size checks
MAX_ARTIFACT_SIZE_MB: int = 10  # cap on compressed artifact size (enforced in peak.compression)
DOWNLOAD_CHUNK_SIZE = 128  # chunk size presumably used when streaming downloads — confirm in callers

# Shared log record layout and the level names accepted by the SDK logger.
LOG_FORMAT = Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")
LOG_LEVELS = Literal["CRITICAL", "FATAL", "ERROR", "WARNING", "WARN", "INFO", "DEBUG"]
34
|
+
|
35
|
+
|
36
|
+
class _ArtifactGlob(TypedDict, total=False):
    """Optional artifact keys; ``total=False`` makes every key here non-required."""

    # Ignorefiles applied when selecting which files go into the artifact.
    ignore_files: list[str]
38
|
+
|
39
|
+
|
40
|
+
class ArtifactInfo(_ArtifactGlob):
    """TypedDict with all required fields for artifact creation.

    path: Path to the file or folder that will be compressed and used as artifact.
    ignore_files: Ignore files to be used when creating artifact.
    """

    # Required key; the optional ignore_files key is inherited from _ArtifactGlob.
    path: str
48
|
+
|
49
|
+
|
50
|
+
class AutoName(Enum):
    """Enum base whose ``auto()`` values are the lower-cased member names."""

    @staticmethod
    def _generate_next_value_(name: str, start: int, count: int, last_values: List[Any]) -> str:  # noqa: ARG004
        """Derive each member's value from its name."""
        # `name` is already a str, so lower-casing it directly is sufficient.
        return name.lower()
57
|
+
|
58
|
+
|
59
|
+
class Stage(AutoName):
    """Enum of all supported platform stages."""

    # Values are the lower-cased member names (via AutoName), e.g. Stage.DEV.value == "dev".
    DEV = auto()
    LATEST = auto()
    TEST = auto()
    BETA = auto()
    PROD = auto()
    PARVATI = auto()
68
|
+
|
69
|
+
|
70
|
+
class ContentType(Enum):
    """Enum of supported content type for http request to API."""

    # MIME type strings sent as the Content-Type header value.
    APPLICATION_JSON = "application/json"
    MULTIPART_FORM_DATA = "multipart/form-data"
75
|
+
|
76
|
+
|
77
|
+
class HttpMethods(AutoName):
    """Enum of supported HTTP methods."""

    # Values are the lower-cased method names (via AutoName), e.g. "get".
    GET = auto()
    POST = auto()
    PUT = auto()
    PATCH = auto()
    DELETE = auto()
85
|
+
|
86
|
+
|
87
|
+
class Sources(AutoName):
    """Enum of the sources for telemetry call."""

    # Distinguishes whether a request originated from the SDK or the CLI.
    SDK = auto()
    CLI = auto()
92
|
+
|
93
|
+
|
94
|
+
# Explicit public API of this module.
__all__: List[str] = [
    "MB",
    "MAX_ARTIFACT_SIZE_MB",
    "DOWNLOAD_CHUNK_SIZE",
    "LOG_FORMAT",
    "ArtifactInfo",
    "Stage",
    "ContentType",
    "HttpMethods",
    "LOG_LEVELS",
    "Sources",
]
peak/exceptions.py
ADDED
@@ -0,0 +1,217 @@
|
|
1
|
+
#
|
2
|
+
# # Copyright © 2023 Peak AI Limited. or its affiliates. All Rights Reserved.
|
3
|
+
# #
|
4
|
+
# # Licensed under the Apache License, Version 2.0 (the "License"). You
|
5
|
+
# # may not use this file except in compliance with the License. A copy of
|
6
|
+
# # the License is located at:
|
7
|
+
# #
|
8
|
+
# # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
|
9
|
+
# #
|
10
|
+
# # or in the "license" file accompanying this file. This file is
|
11
|
+
# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
12
|
+
# # ANY KIND, either express or implied. See the License for the specific
|
13
|
+
# # language governing permissions and limitations under the License.
|
14
|
+
# #
|
15
|
+
# # This file is part of the peak-sdk.
|
16
|
+
# # see (https://github.com/PeakBI/peak-sdk)
|
17
|
+
# #
|
18
|
+
# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
|
19
|
+
# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
|
20
|
+
#
|
21
|
+
"""Exceptions for the Peak API."""
|
22
|
+
from __future__ import annotations
|
23
|
+
|
24
|
+
from collections import defaultdict
|
25
|
+
from pathlib import Path
|
26
|
+
from typing import Any, ClassVar, Dict, List, Optional, Tuple, Type
|
27
|
+
|
28
|
+
|
29
|
+
class PeakBaseException(Exception):
    """Common ancestor of every exception raised by the Peak SDK."""
33
|
+
|
34
|
+
|
35
|
+
class HttpExceptionsRegistryMeta(type):
    """Metaclass that auto-registers HTTP exception classes by status code."""

    # Maps HTTP status code -> exception class; unknown codes fall back to Exception.
    REGISTRY: Dict[int, Any] = defaultdict(lambda: Exception)

    def __new__(
        cls: "Type[HttpExceptionsRegistryMeta]",
        name: str,
        bases: Tuple[Any, ...],
        attrs: Dict[str, Any],
    ) -> HttpExceptionsRegistryMeta:
        """Create the class and record it in REGISTRY when it defines STATUS_CODE.

        Runs whenever a new class using this metaclass is defined; the class's
        `STATUS_CODE` attribute becomes the registry key and the class itself
        the value.
        Ref: https://charlesreid1.github.io/python-patterns-the-registry.html

        Args:
            name (str): Name of the child class
            bases (tuple): Tuple of the child class's inheritance tree
            attrs (dict): Name and value pairs of all the attributes defined in the child class

        Returns:
            HttpExceptionsRegistryMeta: the child class itself, forward annotated for type checking
        """
        created: "HttpExceptionsRegistryMeta" = super().__new__(cls, name, bases, attrs)
        code: Optional[int] = attrs.get("STATUS_CODE", None)
        if code:
            cls.REGISTRY[code] = created
        return created
65
|
+
|
66
|
+
|
67
|
+
class BaseHttpException(PeakBaseException, metaclass=HttpExceptionsRegistryMeta):
    """Parent of all status-code based HTTP exceptions; subclasses self-register via the metaclass."""

    # Concrete subclasses set this to the HTTP status code they represent.
    STATUS_CODE: ClassVar[int]
71
|
+
|
72
|
+
|
73
|
+
class BadRequestException(BaseHttpException):
    """Raised for HTTP 400: the provided inputs are invalid."""

    STATUS_CODE: ClassVar[int] = 400
77
|
+
|
78
|
+
|
79
|
+
class UnauthorizedException(BaseHttpException):
    """Raised for HTTP 401: the authentication credentials are invalid or expired."""

    STATUS_CODE: ClassVar[int] = 401
83
|
+
|
84
|
+
|
85
|
+
class ForbiddenException(BaseHttpException):
    """Raised for HTTP 403: the user lacks permission for the operation."""

    STATUS_CODE: ClassVar[int] = 403
89
|
+
|
90
|
+
|
91
|
+
class NotFoundException(BaseHttpException):
    """Raised for HTTP 404: the resource does not exist."""

    STATUS_CODE: ClassVar[int] = 404
95
|
+
|
96
|
+
|
97
|
+
class ConflictException(BaseHttpException):
    """Raised for HTTP 409: a conflict with the current state of the target resource."""

    STATUS_CODE: ClassVar[int] = 409
101
|
+
|
102
|
+
|
103
|
+
class PayloadTooLargeException(BaseHttpException):
    """Raised for HTTP 413: the provided file size exceeds the maximum limits."""

    STATUS_CODE: ClassVar[int] = 413
107
|
+
|
108
|
+
|
109
|
+
class UnprocessableEntityException(BaseHttpException):
    """Raised for HTTP 422: the request was understood but its instructions could not be processed."""

    STATUS_CODE: ClassVar[int] = 422
113
|
+
|
114
|
+
|
115
|
+
class InternalServerErrorException(BaseHttpException):
    """Raised for HTTP 500: an unexpected server condition prevented fulfilling the request."""

    STATUS_CODE: ClassVar[int] = 500
119
|
+
|
120
|
+
|
121
|
+
class InvalidPathException(PeakBaseException):
    """The provided path is invalid and cannot be processed."""

    def __init__(self, path: str | Path, message: str = "") -> None:
        """Throw exception with custom message.

        Args:
            path (str | Path): Path which is invalid.
            message (str): Any extra message to add to exception.
        """
        detail = f"Invalid path: {path!r}. {message}"
        super().__init__(detail)
132
|
+
|
133
|
+
|
134
|
+
class MissingEnvironmentVariableException(PeakBaseException):
    """Required environment variable not found."""

    def __init__(self, env_var: str, *, message: str = "") -> None:
        """Throw exception with custom message.

        Args:
            env_var (str): Name of env variable which is not present.
            message (str): Any extra message to add to exception.
        """
        detail = f"{env_var} environment variable is not set or is empty. {message}"
        super().__init__(detail)
146
|
+
|
147
|
+
|
148
|
+
class FileLimitExceededException(PeakBaseException):
    """Limits on the file are exceeded."""

    def __init__(self, max_size: int | float, *, message: str = "", units: str = "MB") -> None:
        """Throw exception with custom message.

        Args:
            max_size (int): Maximum size of the file.
            message (str): Additional message to add to exception.
            units (str): Units of the maximum size.
        """
        detail = f"Compressed directory size is over {max_size}{units}. {message}"
        super().__init__(detail)
161
|
+
|
162
|
+
|
163
|
+
class InvalidTemplateException(PeakBaseException):
    """The given template is invalid and could not be compiled."""

    def __init__(self, message: str) -> None:
        """Throw exception with custom message.

        Args:
            message (str): Message of the exception.
        """
        # Message is passed through verbatim — presumably produced by the
        # template engine in peak.template; confirm against callers.
        super().__init__(message)
173
|
+
|
174
|
+
|
175
|
+
class BadParameterException(PeakBaseException):
    """Raises exception for invalid parameters."""

    def __init__(self, param: str, *, message: str = "") -> None:
        """Throw exception with custom message.

        Args:
            param (str): Raw parameter input from the CLI.
            message (str): Additional message to add to exception.
        """
        detail = f"Unable to parse: {param} {message}"
        super().__init__(detail)
187
|
+
|
188
|
+
|
189
|
+
class InvalidParameterException(PeakBaseException):
    """Raises exception for invalid parameters."""

    def __init__(self, *, message: str = "") -> None:
        """Throw exception with custom message.

        Args:
            message (str): Additional message to add to exception.
        """
        # Pure pass-through: the caller supplies the full human-readable message.
        super().__init__(message)
199
|
+
|
200
|
+
|
201
|
+
# Explicit public API of this module (PeakBaseException itself is intentionally
# exported only implicitly through its subclasses' MRO — note it is absent here).
__all__: List[str] = [
    "HttpExceptionsRegistryMeta",
    "BaseHttpException",
    "BadRequestException",
    "UnauthorizedException",
    "ForbiddenException",
    "NotFoundException",
    "ConflictException",
    "PayloadTooLargeException",
    "UnprocessableEntityException",
    "InternalServerErrorException",
    "InvalidPathException",
    "MissingEnvironmentVariableException",
    "FileLimitExceededException",
    "BadParameterException",
    "InvalidParameterException",
]