kleinkram 0.38.1.dev20241212075157__py3-none-any.whl → 0.38.1.dev20250207122632__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kleinkram might be problematic. Click here for more details.
- kleinkram/__init__.py +33 -2
- kleinkram/api/client.py +21 -16
- kleinkram/api/deser.py +165 -0
- kleinkram/api/file_transfer.py +13 -24
- kleinkram/api/pagination.py +56 -0
- kleinkram/api/query.py +111 -0
- kleinkram/api/routes.py +266 -97
- kleinkram/auth.py +21 -20
- kleinkram/cli/__init__.py +0 -0
- kleinkram/{commands/download.py → cli/_download.py} +18 -44
- kleinkram/cli/_endpoint.py +58 -0
- kleinkram/{commands/list.py → cli/_list.py} +25 -38
- kleinkram/cli/_mission.py +153 -0
- kleinkram/cli/_project.py +99 -0
- kleinkram/cli/_upload.py +84 -0
- kleinkram/cli/_verify.py +56 -0
- kleinkram/{app.py → cli/app.py} +57 -25
- kleinkram/cli/error_handling.py +67 -0
- kleinkram/config.py +141 -107
- kleinkram/core.py +251 -3
- kleinkram/errors.py +13 -45
- kleinkram/main.py +1 -1
- kleinkram/models.py +48 -149
- kleinkram/printing.py +325 -0
- kleinkram/py.typed +0 -0
- kleinkram/types.py +9 -0
- kleinkram/utils.py +88 -29
- kleinkram/wrappers.py +401 -0
- {kleinkram-0.38.1.dev20241212075157.dist-info → kleinkram-0.38.1.dev20250207122632.dist-info}/METADATA +3 -3
- kleinkram-0.38.1.dev20250207122632.dist-info/RECORD +49 -0
- {kleinkram-0.38.1.dev20241212075157.dist-info → kleinkram-0.38.1.dev20250207122632.dist-info}/WHEEL +1 -1
- {kleinkram-0.38.1.dev20241212075157.dist-info → kleinkram-0.38.1.dev20250207122632.dist-info}/top_level.txt +1 -0
- testing/__init__.py +0 -0
- testing/backend_fixtures.py +67 -0
- tests/conftest.py +7 -0
- tests/test_config.py +115 -0
- tests/test_core.py +165 -0
- tests/test_end_to_end.py +29 -39
- tests/test_error_handling.py +44 -0
- tests/test_fixtures.py +34 -0
- tests/test_printing.py +62 -0
- tests/test_query.py +138 -0
- tests/test_utils.py +46 -24
- tests/test_wrappers.py +71 -0
- kleinkram/api/parsing.py +0 -86
- kleinkram/commands/__init__.py +0 -1
- kleinkram/commands/endpoint.py +0 -62
- kleinkram/commands/mission.py +0 -69
- kleinkram/commands/project.py +0 -24
- kleinkram/commands/upload.py +0 -164
- kleinkram/commands/verify.py +0 -142
- kleinkram/consts.py +0 -8
- kleinkram/enums.py +0 -10
- kleinkram/resources.py +0 -158
- kleinkram-0.38.1.dev20241212075157.dist-info/LICENSE +0 -674
- kleinkram-0.38.1.dev20241212075157.dist-info/RECORD +0 -37
- tests/test_resources.py +0 -137
- {kleinkram-0.38.1.dev20241212075157.dist-info → kleinkram-0.38.1.dev20250207122632.dist-info}/entry_points.txt +0 -0
kleinkram/__init__.py
CHANGED
|
@@ -1,6 +1,37 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
from kleinkram._version import __version__
|
|
4
|
+
from kleinkram.wrappers import create_mission
|
|
5
|
+
from kleinkram.wrappers import create_project
|
|
6
|
+
from kleinkram.wrappers import delete_file
|
|
7
|
+
from kleinkram.wrappers import delete_files
|
|
8
|
+
from kleinkram.wrappers import delete_mission
|
|
9
|
+
from kleinkram.wrappers import delete_project
|
|
10
|
+
from kleinkram.wrappers import download
|
|
11
|
+
from kleinkram.wrappers import list_files
|
|
12
|
+
from kleinkram.wrappers import list_missions
|
|
13
|
+
from kleinkram.wrappers import list_projects
|
|
14
|
+
from kleinkram.wrappers import update_file
|
|
15
|
+
from kleinkram.wrappers import update_mission
|
|
16
|
+
from kleinkram.wrappers import update_project
|
|
17
|
+
from kleinkram.wrappers import upload
|
|
18
|
+
from kleinkram.wrappers import verify
|
|
4
19
|
|
|
5
|
-
|
|
6
|
-
|
|
20
|
+
__all__ = [
|
|
21
|
+
"__version__",
|
|
22
|
+
"upload",
|
|
23
|
+
"verify",
|
|
24
|
+
"download",
|
|
25
|
+
"list_files",
|
|
26
|
+
"list_missions",
|
|
27
|
+
"list_projects",
|
|
28
|
+
"update_file",
|
|
29
|
+
"update_mission",
|
|
30
|
+
"update_project",
|
|
31
|
+
"delete_files",
|
|
32
|
+
"delete_file",
|
|
33
|
+
"delete_mission",
|
|
34
|
+
"delete_project",
|
|
35
|
+
"create_mission",
|
|
36
|
+
"create_project",
|
|
37
|
+
]
|
kleinkram/api/client.py
CHANGED
|
@@ -5,8 +5,11 @@ from threading import Lock
|
|
|
5
5
|
from typing import Any
|
|
6
6
|
|
|
7
7
|
import httpx
|
|
8
|
-
|
|
8
|
+
|
|
9
|
+
from kleinkram.config import Config
|
|
9
10
|
from kleinkram.config import Credentials
|
|
11
|
+
from kleinkram.config import get_config
|
|
12
|
+
from kleinkram.config import save_config
|
|
10
13
|
from kleinkram.errors import NotAuthenticated
|
|
11
14
|
|
|
12
15
|
logger = logging.getLogger(__name__)
|
|
@@ -27,27 +30,28 @@ class AuthenticatedClient(httpx.Client):
|
|
|
27
30
|
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
28
31
|
super().__init__(*args, **kwargs)
|
|
29
32
|
|
|
30
|
-
self._config =
|
|
33
|
+
self._config = get_config()
|
|
31
34
|
self._config_lock = Lock()
|
|
32
35
|
|
|
33
|
-
if self._config.
|
|
34
|
-
assert self._config.cli_key, "unreachable"
|
|
35
|
-
logger.info("using cli key...")
|
|
36
|
-
self.cookies.set(COOKIE_CLI_KEY, self._config.cli_key)
|
|
37
|
-
|
|
38
|
-
elif self._config.has_refresh_token:
|
|
39
|
-
logger.info("using refresh token...")
|
|
40
|
-
assert self._config.auth_token is not None, "unreachable"
|
|
41
|
-
self.cookies.set(COOKIE_AUTH_TOKEN, self._config.auth_token)
|
|
42
|
-
else:
|
|
36
|
+
if self._config.credentials is None:
|
|
43
37
|
logger.info("not authenticated...")
|
|
44
38
|
raise NotAuthenticated
|
|
39
|
+
elif (cli_key := self._config.credentials.cli_key) is not None:
|
|
40
|
+
logger.info("using cli key...")
|
|
41
|
+
self.cookies.set(COOKIE_CLI_KEY, cli_key)
|
|
42
|
+
else:
|
|
43
|
+
logger.info("using refresh token...")
|
|
44
|
+
assert self._config.credentials.auth_token is not None, "unreachable"
|
|
45
|
+
self.cookies.set(COOKIE_AUTH_TOKEN, self._config.credentials.auth_token)
|
|
45
46
|
|
|
46
47
|
def _refresh_token(self) -> None:
|
|
47
|
-
if self._config.
|
|
48
|
+
if self._config.credentials is None:
|
|
49
|
+
raise NotAuthenticated
|
|
50
|
+
|
|
51
|
+
if self._config.credentials.cli_key is not None:
|
|
48
52
|
raise RuntimeError("cannot refresh token when using cli key auth")
|
|
49
53
|
|
|
50
|
-
refresh_token = self._config.refresh_token
|
|
54
|
+
refresh_token = self._config.credentials.refresh_token
|
|
51
55
|
if refresh_token is None:
|
|
52
56
|
raise RuntimeError("no refresh token found")
|
|
53
57
|
self.cookies.set(COOKIE_REFRESH_TOKEN, refresh_token)
|
|
@@ -63,7 +67,8 @@ class AuthenticatedClient(httpx.Client):
|
|
|
63
67
|
logger.info("saving new tokens...")
|
|
64
68
|
|
|
65
69
|
with self._config_lock:
|
|
66
|
-
self._config.
|
|
70
|
+
self._config.credentials = creds
|
|
71
|
+
save_config(self._config)
|
|
67
72
|
|
|
68
73
|
self.cookies.set(COOKIE_AUTH_TOKEN, new_access_token)
|
|
69
74
|
|
|
@@ -76,7 +81,7 @@ class AuthenticatedClient(httpx.Client):
|
|
|
76
81
|
url = f"/{url}"
|
|
77
82
|
|
|
78
83
|
# try to do a request
|
|
79
|
-
full_url = f"{self._config.endpoint}{url}"
|
|
84
|
+
full_url = f"{self._config.endpoint.api}{url}"
|
|
80
85
|
logger.info(f"requesting {method} {full_url}")
|
|
81
86
|
response = super().request(method, full_url, *args, **kwargs)
|
|
82
87
|
|
kleinkram/api/deser.py
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import Dict
|
|
7
|
+
from typing import Literal
|
|
8
|
+
from typing import NewType
|
|
9
|
+
from typing import Tuple
|
|
10
|
+
from uuid import UUID
|
|
11
|
+
|
|
12
|
+
import dateutil.parser
|
|
13
|
+
|
|
14
|
+
from kleinkram.errors import ParsingError
|
|
15
|
+
from kleinkram.models import File
|
|
16
|
+
from kleinkram.models import FileState
|
|
17
|
+
from kleinkram.models import Mission
|
|
18
|
+
from kleinkram.models import Project
|
|
19
|
+
|
|
20
|
+
__all__ = [
|
|
21
|
+
"_parse_project",
|
|
22
|
+
"_parse_mission",
|
|
23
|
+
"_parse_file",
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
ProjectObject = NewType("ProjectObject", Dict[str, Any])
|
|
28
|
+
MissionObject = NewType("MissionObject", Dict[str, Any])
|
|
29
|
+
FileObject = NewType("FileObject", Dict[str, Any])
|
|
30
|
+
|
|
31
|
+
# NOTE(review): these two lists are never populated or read anywhere in this
# module — presumably leftover scaffolding; confirm before removing.
PROJECT_OBJECT_KEYS = []
MISSION_OBJECT_KEYS = []

# keys under which the parent objects are nested inside API response objects
MISSION = "mission"
PROJECT = "project"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class FileObjectKeys(str, Enum):
    """JSON keys of a file object as returned by the kleinkram API."""

    UUID = "uuid"
    FILENAME = "filename"
    DATE = "date"  # at some point this will become a metadata
    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"
    STATE = "state"
    SIZE = "size"
    HASH = "hash"
    TYPE = "type"
    CATEGORIES = "categories"
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class MissionObjectKeys(str, Enum):
    """JSON keys of a mission object as returned by the kleinkram API."""

    UUID = "uuid"
    NAME = "name"
    DESCRIPTION = "description"
    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class ProjectObjectKeys(str, Enum):
    """JSON keys of a project object as returned by the kleinkram API."""

    UUID = "uuid"
    NAME = "name"
    DESCRIPTION = "description"
    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _get_nested_info(data, key: Literal["mission", "project"]) -> Tuple[UUID, str]:
    """Return the (uuid, name) pair of the object nested under *key*.

    NOTE(review): the lookup uses ProjectObjectKeys for both missions and
    projects; this works because both object types share the same
    "uuid"/"name" key strings — confirm this stays true server-side.
    """
    nested = data[key]
    nested_id = UUID(nested[ProjectObjectKeys.UUID], version=4)
    return nested_id, nested[ProjectObjectKeys.NAME]
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _parse_datetime(date: str) -> datetime:
    """Parse an ISO-8601 timestamp string, wrapping failures in ParsingError."""
    try:
        parsed = dateutil.parser.isoparse(date)
    except ValueError as err:
        raise ParsingError(f"error parsing date: {date}") from err
    return parsed
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _parse_file_state(state: str) -> FileState:
    """Convert a raw ``state`` string into a FileState enum member."""
    try:
        parsed = FileState(state)
    except ValueError as err:
        raise ParsingError(f"error parsing file state: {state}") from err
    return parsed
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _parse_project(project_object: ProjectObject) -> Project:
    """Deserialize a raw project JSON object into a ``Project`` model.

    Raises:
        ParsingError: if an expected key is missing or a value is malformed.
    """
    try:
        # extraction order matters only for which underlying error becomes
        # the chained cause; keep it stable
        kwargs = dict(
            id=UUID(project_object[ProjectObjectKeys.UUID], version=4),
            name=project_object[ProjectObjectKeys.NAME],
            description=project_object[ProjectObjectKeys.DESCRIPTION],
            created_at=_parse_datetime(project_object[ProjectObjectKeys.CREATED_AT]),
            updated_at=_parse_datetime(project_object[ProjectObjectKeys.UPDATED_AT]),
        )
    except Exception as err:
        raise ParsingError(f"error parsing project: {project_object}") from err
    return Project(**kwargs)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def _parse_mission(mission: MissionObject) -> Mission:
    """Deserialize a raw mission JSON object into a ``Mission`` model.

    Raises:
        ParsingError: if an expected key is missing or a value is malformed.
    """
    try:
        mission_id = UUID(mission[MissionObjectKeys.UUID], version=4)
        mission_name = mission[MissionObjectKeys.NAME]
        created = _parse_datetime(mission[MissionObjectKeys.CREATED_AT])
        updated = _parse_datetime(mission[MissionObjectKeys.UPDATED_AT])

        project_id, project_name = _get_nested_info(mission, PROJECT)

        result = Mission(
            id=mission_id,
            name=mission_name,
            created_at=created,
            updated_at=updated,
            # TODO: mission metadata is deliberately not parsed yet
            metadata={},
            project_id=project_id,
            project_name=project_name,
        )
    except Exception as err:
        raise ParsingError(f"error parsing mission: {mission}") from err
    return result
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _parse_file(file: FileObject) -> File:
    """Deserialize a raw file JSON object into a ``File`` model.

    Raises:
        ParsingError: if an expected key is missing or a value is malformed.
    """
    try:
        # keep the original key-read order so error causes stay stable
        filename = file[FileObjectKeys.FILENAME]
        file_id = UUID(file[FileObjectKeys.UUID], version=4)
        size = file[FileObjectKeys.SIZE]
        digest = file[FileObjectKeys.HASH]
        # only the last dotted component of the type string is kept
        file_type = file[FileObjectKeys.TYPE].split(".")[-1]
        date = file[FileObjectKeys.DATE]
        created = _parse_datetime(file[FileObjectKeys.CREATED_AT])
        updated = _parse_datetime(file[FileObjectKeys.UPDATED_AT])
        state = _parse_file_state(file[FileObjectKeys.STATE])
        categories = file[FileObjectKeys.CATEGORIES]

        mission_id, mission_name = _get_nested_info(file, MISSION)
        project_id, project_name = _get_nested_info(file[MISSION], PROJECT)

        result = File(
            id=file_id,
            name=filename,
            hash=digest,
            size=size,
            type_=file_type,
            date=date,
            categories=categories,
            state=state,
            created_at=created,
            updated_at=updated,
            mission_id=mission_id,
            mission_name=mission_name,
            project_id=project_id,
            project_name=project_name,
        )
    except Exception as err:
        raise ParsingError(f"error parsing file: {file}") from err
    return result
|
kleinkram/api/file_transfer.py
CHANGED
|
@@ -2,9 +2,9 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
import logging
|
|
4
4
|
import sys
|
|
5
|
-
from concurrent.futures import as_completed
|
|
6
5
|
from concurrent.futures import Future
|
|
7
6
|
from concurrent.futures import ThreadPoolExecutor
|
|
7
|
+
from concurrent.futures import as_completed
|
|
8
8
|
from enum import Enum
|
|
9
9
|
from pathlib import Path
|
|
10
10
|
from time import monotonic
|
|
@@ -17,9 +17,11 @@ from uuid import UUID
|
|
|
17
17
|
import boto3.s3.transfer
|
|
18
18
|
import botocore.config
|
|
19
19
|
import httpx
|
|
20
|
+
from rich.console import Console
|
|
21
|
+
from tqdm import tqdm
|
|
22
|
+
|
|
20
23
|
from kleinkram.api.client import AuthenticatedClient
|
|
21
|
-
from kleinkram.config import
|
|
22
|
-
from kleinkram.config import LOCAL_S3
|
|
24
|
+
from kleinkram.config import get_config
|
|
23
25
|
from kleinkram.errors import AccessDenied
|
|
24
26
|
from kleinkram.models import File
|
|
25
27
|
from kleinkram.models import FileState
|
|
@@ -27,9 +29,6 @@ from kleinkram.utils import b64_md5
|
|
|
27
29
|
from kleinkram.utils import format_error
|
|
28
30
|
from kleinkram.utils import format_traceback
|
|
29
31
|
from kleinkram.utils import styled_string
|
|
30
|
-
from rich.console import Console
|
|
31
|
-
from tqdm import tqdm
|
|
32
|
-
|
|
33
32
|
|
|
34
33
|
logger = logging.getLogger(__name__)
|
|
35
34
|
|
|
@@ -52,16 +51,6 @@ class UploadCredentials(NamedTuple):
|
|
|
52
51
|
bucket: str
|
|
53
52
|
|
|
54
53
|
|
|
55
|
-
def _get_s3_endpoint() -> str:
|
|
56
|
-
config = Config()
|
|
57
|
-
endpoint = config.endpoint
|
|
58
|
-
|
|
59
|
-
if "localhost" in endpoint:
|
|
60
|
-
return LOCAL_S3
|
|
61
|
-
else:
|
|
62
|
-
return endpoint.replace("api", "minio")
|
|
63
|
-
|
|
64
|
-
|
|
65
54
|
def _confirm_file_upload(
|
|
66
55
|
client: AuthenticatedClient, file_id: UUID, file_hash: str
|
|
67
56
|
) -> None:
|
|
@@ -106,7 +95,7 @@ def _get_upload_creditials(
|
|
|
106
95
|
resp = client.post(UPLOAD_CREDS, json=dct)
|
|
107
96
|
resp.raise_for_status()
|
|
108
97
|
|
|
109
|
-
data = resp.json()[0]
|
|
98
|
+
data = resp.json()["data"][0]
|
|
110
99
|
|
|
111
100
|
if data.get("error") == FILE_EXISTS_ERROR:
|
|
112
101
|
return None
|
|
@@ -170,10 +159,13 @@ def upload_file(
|
|
|
170
159
|
filename: str,
|
|
171
160
|
path: Path,
|
|
172
161
|
verbose: bool = False,
|
|
162
|
+
s3_endpoint: Optional[str] = None,
|
|
173
163
|
) -> UploadState:
|
|
174
164
|
"""\
|
|
175
165
|
returns bytes uploaded
|
|
176
166
|
"""
|
|
167
|
+
if s3_endpoint is None:
|
|
168
|
+
s3_endpoint = get_config().endpoint.s3
|
|
177
169
|
|
|
178
170
|
total_size = path.stat().st_size
|
|
179
171
|
with tqdm(
|
|
@@ -184,8 +176,6 @@ def upload_file(
|
|
|
184
176
|
leave=False,
|
|
185
177
|
disable=not verbose,
|
|
186
178
|
) as pbar:
|
|
187
|
-
endpoint = _get_s3_endpoint()
|
|
188
|
-
|
|
189
179
|
# get per file upload credentials
|
|
190
180
|
creds = _get_upload_creditials(
|
|
191
181
|
client, internal_filename=filename, mission_id=mission_id
|
|
@@ -194,12 +184,11 @@ def upload_file(
|
|
|
194
184
|
return UploadState.EXISTS
|
|
195
185
|
|
|
196
186
|
try:
|
|
197
|
-
_s3_upload(path, endpoint=
|
|
187
|
+
_s3_upload(path, endpoint=s3_endpoint, credentials=creds, pbar=pbar)
|
|
198
188
|
except Exception as e:
|
|
199
189
|
logger.error(format_traceback(e))
|
|
200
190
|
_cancel_file_upload(client, creds.file_id, mission_id)
|
|
201
191
|
return UploadState.CANCELED
|
|
202
|
-
|
|
203
192
|
else:
|
|
204
193
|
_confirm_file_upload(client, creds.file_id, b64_md5(path))
|
|
205
194
|
return UploadState.UPLOADED
|
|
@@ -379,14 +368,14 @@ def _download_handler(
|
|
|
379
368
|
|
|
380
369
|
def upload_files(
|
|
381
370
|
client: AuthenticatedClient,
|
|
382
|
-
|
|
371
|
+
files: Dict[str, Path],
|
|
383
372
|
mission_id: UUID,
|
|
384
373
|
*,
|
|
385
374
|
verbose: bool = False,
|
|
386
375
|
n_workers: int = 2,
|
|
387
376
|
) -> None:
|
|
388
377
|
with tqdm(
|
|
389
|
-
total=len(
|
|
378
|
+
total=len(files),
|
|
390
379
|
unit="files",
|
|
391
380
|
desc="uploading files",
|
|
392
381
|
disable=not verbose,
|
|
@@ -395,7 +384,7 @@ def upload_files(
|
|
|
395
384
|
start = monotonic()
|
|
396
385
|
futures: Dict[Future[UploadState], Path] = {}
|
|
397
386
|
with ThreadPoolExecutor(max_workers=n_workers) as executor:
|
|
398
|
-
for name, path in
|
|
387
|
+
for name, path in files.items():
|
|
399
388
|
future = executor.submit(
|
|
400
389
|
upload_file,
|
|
401
390
|
client=client,
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Any
|
|
5
|
+
from typing import Dict
|
|
6
|
+
from typing import Generator
|
|
7
|
+
from typing import List
|
|
8
|
+
from typing import Mapping
|
|
9
|
+
from typing import Optional
|
|
10
|
+
from typing import cast
|
|
11
|
+
|
|
12
|
+
from kleinkram.api.client import AuthenticatedClient
|
|
13
|
+
|
|
14
|
+
DataPage = Dict[str, Any]


PAGE_SIZE = 128
SKIP = "skip"
TAKE = "take"


def paginated_request(
    client: AuthenticatedClient,
    endpoint: str,
    params: Optional[Mapping[str, Any]] = None,
    max_entries: Optional[int] = None,
    page_size: int = PAGE_SIZE,
) -> Generator[DataPage, None, None]:
    """Yield entries from a paginated API endpoint, page by page.

    Args:
        client: authenticated HTTP client used for the GET requests
        endpoint: API path to request
        params: extra query parameters (copied; never mutated)
        max_entries: stop after yielding this many entries, if given
        page_size: number of entries requested per page
    """
    query: Dict[str, Any] = dict(params or {})
    query[TAKE] = page_size
    query[SKIP] = 0

    yielded = 0
    while True:
        resp = client.get(endpoint, params=query)
        resp.raise_for_status()  # TODO: this is fine for now

        payload = resp.json()
        entries = cast(List[DataPage], payload["data"])

        for entry in entries:
            yielded += 1
            yield entry
            if max_entries is not None and max_entries <= yielded:
                return

        # stop once the server reports no entries beyond this page
        count = cast(int, payload["count"])
        skip = cast(int, payload["skip"])
        take = cast(int, payload["take"])
        if count - skip - take <= 0:
            return

        query[SKIP] = yielded
|
kleinkram/api/query.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""\
|
|
2
|
+
this file contains dataclasses for specifying remote resources on kleinkram
|
|
3
|
+
here we also provide some helper functions to validate certain properties
|
|
4
|
+
of these specifications
|
|
5
|
+
|
|
6
|
+
additionally we provide wrappers around the api for fetching the specified
|
|
7
|
+
resources (TODO: move this part to another file)
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from dataclasses import field
|
|
14
|
+
from typing import List
|
|
15
|
+
from uuid import UUID
|
|
16
|
+
|
|
17
|
+
from kleinkram.errors import InvalidMissionQuery
|
|
18
|
+
from kleinkram.errors import InvalidProjectQuery
|
|
19
|
+
|
|
20
|
+
MAX_PARALLEL_REQUESTS = 32
|
|
21
|
+
SPECIAL_PATTERN_CHARS = ["*", "?", "[", "]"]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@dataclass
class ProjectQuery:
    """Specification of remote projects, by glob patterns and/or ids."""

    patterns: List[str] = field(default_factory=list)
    ids: List[UUID] = field(default_factory=list)


@dataclass
class MissionQuery:
    """Specification of remote missions, optionally scoped by a project query."""

    patterns: List[str] = field(default_factory=list)
    ids: List[UUID] = field(default_factory=list)
    # BUG FIX: `field(default=ProjectQuery())` shared ONE mutable ProjectQuery
    # instance across every MissionQuery built with the default, so mutating
    # one query's project_query leaked into all others. A factory gives each
    # instance its own fresh ProjectQuery.
    project_query: ProjectQuery = field(default_factory=ProjectQuery)


@dataclass
class FileQuery:
    """Specification of remote files, optionally scoped by a mission query."""

    patterns: List[str] = field(default_factory=list)
    ids: List[UUID] = field(default_factory=list)
    # same shared-mutable-default fix as MissionQuery.project_query
    mission_query: MissionQuery = field(default_factory=MissionQuery)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def check_mission_query_is_creatable(query: MissionQuery) -> str:
    """\
    Validate that *query* uniquely names a mission that could be created.

    Returns:
        the mission name (the single literal pattern)

    Raises:
        InvalidMissionQuery: if the query is ambiguous or id-based.
    """
    if not mission_query_is_unique(query):
        raise InvalidMissionQuery(f"Mission query is not unique: {query}")
    if query.ids:
        # a mission cannot be created from an id, only from a name
        raise InvalidMissionQuery(f"cant create mission by id: {query}")
    return query.patterns[0]
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def check_project_query_is_creatable(query: ProjectQuery) -> str:
    """\
    Validate that *query* uniquely names a project that could be created.

    Returns:
        the project name (the single literal pattern)

    Raises:
        InvalidProjectQuery: if the query is ambiguous or id-based.
    """
    if not project_query_is_unique(query):
        raise InvalidProjectQuery(f"Project query is not unique: {query}")
    if query.ids:
        # a project cannot be created from an id, only from a name
        raise InvalidProjectQuery(f"cant create project by id: {query}")
    return query.patterns[0]
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _pattern_is_unique(pattern: str) -> bool:
    """Return True iff *pattern* contains no glob wildcard characters."""
    return not any(char in pattern for char in SPECIAL_PATTERN_CHARS)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def project_query_is_unique(query: ProjectQuery) -> bool:
    """Return True iff *query* can match at most one project."""
    # exactly one id and no patterns pins down a single project
    if len(query.ids) == 1 and not query.patterns:
        return True
    # otherwise we need exactly one literal (wildcard-free) name pattern
    return len(query.patterns) == 1 and _pattern_is_unique(query.patterns[0])
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def mission_query_is_unique(query: MissionQuery) -> bool:
    """Return True iff *query* can match at most one mission."""
    # exactly one id and no patterns pins down a single mission
    if len(query.ids) == 1 and not query.patterns:
        return True
    # otherwise: one literal mission name inside a uniquely-specified project
    return (
        project_query_is_unique(query.project_query)
        and len(query.patterns) == 1
        and _pattern_is_unique(query.patterns[0])
    )
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def file_query_is_unique(query: FileQuery) -> bool:
    """Return True iff *query* can match at most one file."""
    # exactly one id and no patterns pins down a single file
    if len(query.ids) == 1 and not query.patterns:
        return True
    # otherwise: one literal file name inside a uniquely-specified mission
    return (
        mission_query_is_unique(query.mission_query)
        and len(query.patterns) == 1
        and _pattern_is_unique(query.patterns[0])
    )
|