divbase-cli 0.1.0.dev2__py3-none-any.whl → 0.1.0.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- divbase_cli/__init__.py +1 -1
- divbase_cli/cli_commands/auth_cli.py +4 -9
- divbase_cli/cli_commands/dimensions_cli.py +4 -8
- divbase_cli/cli_commands/file_cli.py +284 -70
- divbase_cli/cli_commands/query_cli.py +3 -7
- divbase_cli/cli_commands/shared_args_options.py +20 -0
- divbase_cli/cli_commands/task_history_cli.py +3 -8
- divbase_cli/cli_commands/user_config_cli.py +14 -44
- divbase_cli/cli_commands/version_cli.py +16 -24
- divbase_cli/cli_config.py +18 -7
- divbase_cli/cli_exceptions.py +37 -22
- divbase_cli/config_resolver.py +10 -10
- divbase_cli/divbase_cli.py +1 -1
- divbase_cli/retries.py +34 -0
- divbase_cli/services/__init__.py +0 -0
- divbase_cli/services/pre_signed_urls.py +446 -0
- divbase_cli/services/project_versions.py +77 -0
- divbase_cli/services/s3_files.py +355 -0
- divbase_cli/user_auth.py +26 -13
- divbase_cli/user_config.py +20 -9
- divbase_cli/utils.py +47 -0
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/METADATA +4 -3
- divbase_cli-0.1.0.dev3.dist-info/RECORD +27 -0
- divbase_cli/pre_signed_urls.py +0 -169
- divbase_cli/services.py +0 -219
- divbase_cli-0.1.0.dev2.dist-info/RECORD +0 -22
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/WHEEL +0 -0
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Service layer for DivBase CLI S3 file operations.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
import httpx
|
|
9
|
+
import typer
|
|
10
|
+
|
|
11
|
+
from divbase_cli.cli_exceptions import (
|
|
12
|
+
FileDoesNotExistInSpecifiedVersionError,
|
|
13
|
+
FilesAlreadyInProjectError,
|
|
14
|
+
)
|
|
15
|
+
from divbase_cli.services.pre_signed_urls import (
|
|
16
|
+
DownloadOutcome,
|
|
17
|
+
FailedUpload,
|
|
18
|
+
SuccessfulUpload,
|
|
19
|
+
UploadOutcome,
|
|
20
|
+
download_multiple_pre_signed_urls,
|
|
21
|
+
perform_multipart_upload,
|
|
22
|
+
upload_multiple_singlepart_pre_signed_urls,
|
|
23
|
+
)
|
|
24
|
+
from divbase_cli.services.project_versions import get_version_details_command
|
|
25
|
+
from divbase_cli.user_auth import make_authenticated_request
|
|
26
|
+
from divbase_lib.api_schemas.s3 import (
|
|
27
|
+
FileChecksumResponse,
|
|
28
|
+
ListObjectsRequest,
|
|
29
|
+
ListObjectsResponse,
|
|
30
|
+
ObjectDetails,
|
|
31
|
+
ObjectInfoResponse,
|
|
32
|
+
PreSignedDownloadResponse,
|
|
33
|
+
PreSignedSinglePartUploadResponse,
|
|
34
|
+
RestoreObjectsResponse,
|
|
35
|
+
)
|
|
36
|
+
from divbase_lib.divbase_constants import (
|
|
37
|
+
MAX_S3_API_BATCH_SIZE,
|
|
38
|
+
QUERY_RESULTS_FILE_PREFIX,
|
|
39
|
+
S3_MULTIPART_UPLOAD_THRESHOLD,
|
|
40
|
+
)
|
|
41
|
+
from divbase_lib.s3_checksums import (
|
|
42
|
+
MD5CheckSumFormat,
|
|
43
|
+
calculate_composite_md5_s3_etag,
|
|
44
|
+
calculate_md5_checksum,
|
|
45
|
+
convert_checksum_hex_to_base64,
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def list_files_command(
    divbase_base_url: str,
    project_name: str,
    prefix_filter: str | None = None,
    include_results_files: bool = False,
) -> list[ObjectDetails]:
    """
    Return every file in a project, optionally filtered by a prefix.

    Pages through the server's list endpoint until no continuation token
    remains. All pages are collected before anything is returned; this is
    simple but not streamed, so it could be revisited if projects ever hold
    huge numbers of files.
    """
    api_route = f"v1/s3/list?project_name={project_name}"

    all_matches: list[ObjectDetails] = []
    next_token: str | None = None
    first_page = True

    # Keep requesting pages until the server stops returning a continuation token.
    while first_page or next_token:
        first_page = False
        page_request = ListObjectsRequest(prefix=prefix_filter, next_token=next_token)
        response = make_authenticated_request(
            method="POST",
            divbase_base_url=divbase_base_url,
            api_route=api_route,
            json=page_request.model_dump(),
        )
        page = ListObjectsResponse(**response.json())
        all_matches.extend(page.objects)
        next_token = page.next_token

    # DivBase query results/job files live alongside user files, so they are
    # hidden client-side rather than via S3; this keeps the prefix parameter
    # free for the user's own optional filter.
    if not include_results_files:
        all_matches = [obj for obj in all_matches if not obj.name.startswith(QUERY_RESULTS_FILE_PREFIX)]

    return all_matches
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def get_file_info_command(divbase_base_url: str, project_name: str, object_name: str) -> ObjectInfoResponse:
    """Fetch detailed metadata for a single file/object in a project."""
    api_route = f"v1/s3/info?project_name={project_name}&object_name={object_name}"
    response = make_authenticated_request(
        method="GET",
        divbase_base_url=divbase_base_url,
        api_route=api_route,
    )
    return ObjectInfoResponse(**response.json())
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def soft_delete_objects_command(divbase_base_url: str, project_name: str, all_files: list[str]) -> list[str]:
    """
    Soft delete the given objects in the project's bucket.

    Returns the list of object names that were soft deleted.
    """
    api_route = f"v1/s3/?project_name={project_name}"
    response = make_authenticated_request(
        method="DELETE",
        divbase_base_url=divbase_base_url,
        api_route=api_route,
        json=all_files,
    )
    return response.json()
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def restore_objects_command(divbase_base_url: str, project_name: str, all_files: list[str]) -> RestoreObjectsResponse:
    """
    Restore soft-deleted objects in the project's bucket.

    Returns an object listing both the restored objects and those that
    could not be restored.
    """
    api_route = f"v1/s3/restore?project_name={project_name}"
    response = make_authenticated_request(
        method="POST",
        divbase_base_url=divbase_base_url,
        api_route=api_route,
        json=all_files,
    )
    return RestoreObjectsResponse(**response.json())
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def download_files_command(
    divbase_base_url: str,
    project_name: str,
    raw_files_input: list[str],
    download_dir: Path,
    verify_checksums: bool,
    project_version: str | None = None,
) -> DownloadOutcome:
    """
    Download files from the given project's S3 bucket.

    Files may be requested either:
      - at a global project version (``project_version``), in which case
        per-file "name:version_id" syntax is rejected, or
      - individually, optionally pinning a version with "name:version_id".

    Returns a DownloadOutcome with the successful and failed downloads.

    Raises:
        NotADirectoryError: if ``download_dir`` does not exist or is not a directory.
        FileDoesNotExistInSpecifiedVersionError: if a requested file is absent
            from the specified project version.
        typer.Exit: on mixing --project-version with per-file version syntax.
    """
    if not download_dir.is_dir():
        raise NotADirectoryError(
            f"The specified download directory '{download_dir}' is not a directory. Please create it or specify a valid directory before continuing."
        )

    if project_version is not None:
        offending_files = [file for file in raw_files_input if ":" in file]
        if offending_files:
            # NOTE: builtin print does not interpret rich markup, so the
            # previous "[red]" tag would have been printed literally; the
            # message is kept as plain text.
            offending_list = "\n".join(offending_files)
            print(
                "ERROR: bad Input: If you provide a global project version (using --project-version) "
                "you cannot also specify specific versions of individual files to download. \n"
                "offending files in your input: \n"
                f"{offending_list} \n"
                "Exiting..."
            )
            raise typer.Exit(1)

    if project_version:
        project_version_details = get_version_details_command(
            project_name=project_name, divbase_base_url=divbase_base_url, version_name=project_version
        )

        # Every requested file must exist at the specified project version.
        missing_objects = [f for f in raw_files_input if f not in project_version_details.files]
        if missing_objects:
            raise FileDoesNotExistInSpecifiedVersionError(
                project_name=project_name,
                project_version=project_version,
                missing_files=missing_objects,
            )
        json_data = [
            {"name": name, "version_id": project_version_details.files[name]} for name in raw_files_input
        ]
    else:
        # Parse raw file inputs for optional per-file version ids using the
        # "file_name:version_id" format (not possible with project_version).
        json_data = []
        for file_input in raw_files_input:
            if ":" in file_input:
                name, version_id = file_input.split(sep=":", maxsplit=1)
                json_data.append({"name": name, "version_id": version_id})
            else:
                json_data.append({"name": file_input, "version_id": None})

    successful_downloads, failed_downloads = [], []
    # Batch requests to respect the server's max objects-per-call limit.
    for i in range(0, len(json_data), MAX_S3_API_BATCH_SIZE):
        batch_json_data = json_data[i : i + MAX_S3_API_BATCH_SIZE]
        response = make_authenticated_request(
            method="POST",
            divbase_base_url=divbase_base_url,
            api_route=f"v1/s3/download?project_name={project_name}",
            json=batch_json_data,
        )
        pre_signed_urls = [PreSignedDownloadResponse(**item) for item in response.json()]

        batch_download_success, batch_download_failed = download_multiple_pre_signed_urls(
            pre_signed_urls=pre_signed_urls,
            download_dir=download_dir,
            verify_checksums=verify_checksums,
        )
        successful_downloads.extend(batch_download_success)
        failed_downloads.extend(batch_download_failed)

    return DownloadOutcome(successful=successful_downloads, failed=failed_downloads)
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def stream_file_command(
    divbase_base_url: str, project_name: str, file_name: str, version_id: str | None = None
) -> None:
    """Stream the contents of a single file in the project's S3 bucket to stdout."""
    api_response = make_authenticated_request(
        method="POST",
        divbase_base_url=divbase_base_url,
        api_route=f"v1/s3/download?project_name={project_name}",
        json=[{"name": file_name, "version_id": version_id}],
    )
    pre_signed_url = PreSignedDownloadResponse(**api_response.json()[0]).pre_signed_url

    try:
        with httpx.stream("GET", pre_signed_url, timeout=None) as download:
            download.raise_for_status()
            for chunk in download.iter_bytes():
                sys.stdout.buffer.write(chunk)
    except BrokenPipeError:
        # Expected when the output is piped to a command that exits early,
        # e.g. `[divbase-cli stream command] | head -n 10`.
        pass
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def upload_files_command(
    project_name: str, divbase_base_url: str, all_files: list[Path], safe_mode: bool
) -> UploadOutcome:
    """
    Upload files to the project's S3 bucket.

    Files at or below S3_MULTIPART_UPLOAD_THRESHOLD are uploaded via batched
    single-part pre-signed URLs; larger files use multipart uploads.

    - Safe mode:
        1. Checks if any of the files to be uploaded already exist in the
           bucket (by comparing checksums) and aborts if so.
        2. Adds each file's checksum to the upload request so the server can
           verify the upload.

    Returns an UploadOutcome object containing details of which files were
    successfully uploaded and which failed.
    """
    # Initialized unconditionally so the name is always bound; only populated
    # (and only read) when safe_mode is enabled.
    file_checksums_hex: dict[str, str] = {}
    if safe_mode:
        # Mapping of file name to hex-encoded checksum; raises if any file
        # already exists in the project with an identical checksum.
        file_checksums_hex = compare_local_to_s3_checksums(
            project_name=project_name,
            divbase_base_url=divbase_base_url,
            all_files=all_files,
        )

    files_below_threshold, files_above_threshold = [], []
    for file in all_files:
        if file.stat().st_size <= S3_MULTIPART_UPLOAD_THRESHOLD:
            files_below_threshold.append(file)
        else:
            files_above_threshold.append(file)

    all_successful_uploads: list[SuccessfulUpload] = []
    all_failed_uploads: list[FailedUpload] = []

    # P1. Process all single-part uploads in batches of the max size allowed
    # by the divbase server.
    for i in range(0, len(files_below_threshold), MAX_S3_API_BATCH_SIZE):
        batch_files = files_below_threshold[i : i + MAX_S3_API_BATCH_SIZE]
        batch_outcome = _upload_single_part_batch(
            project_name=project_name,
            divbase_base_url=divbase_base_url,
            batch_files=batch_files,
            safe_mode=safe_mode,
            file_checksums_hex=file_checksums_hex,
        )
        all_successful_uploads.extend(batch_outcome.successful)
        all_failed_uploads.extend(batch_outcome.failed)

    # P2. Process all multipart uploads, one file at a time.
    for file_path in files_above_threshold:
        outcome = perform_multipart_upload(
            project_name=project_name,
            divbase_base_url=divbase_base_url,
            file_path=file_path,
            safe_mode=safe_mode,
        )

        if isinstance(outcome, SuccessfulUpload):
            all_successful_uploads.append(outcome)
        else:
            all_failed_uploads.append(outcome)

    return UploadOutcome(successful=all_successful_uploads, failed=all_failed_uploads)


def _upload_single_part_batch(
    project_name: str,
    divbase_base_url: str,
    batch_files: list[Path],
    safe_mode: bool,
    file_checksums_hex: dict[str, str],
) -> UploadOutcome:
    """Request pre-signed URLs for one batch of small files and upload them."""
    batch_of_objects_to_upload = []
    for file in batch_files:
        upload_object = {
            "name": file.name,
            "content_length": file.stat().st_size,
        }
        if safe_mode:
            # The server expects the checksum base64-encoded for verification.
            upload_object["md5_hash"] = convert_checksum_hex_to_base64(file_checksums_hex[file.name])
        batch_of_objects_to_upload.append(upload_object)

    response = make_authenticated_request(
        method="POST",
        divbase_base_url=divbase_base_url,
        api_route=f"v1/s3/upload/single-part?project_name={project_name}",
        json=batch_of_objects_to_upload,
    )
    pre_signed_urls = [PreSignedSinglePartUploadResponse(**item) for item in response.json()]
    return upload_multiple_singlepart_pre_signed_urls(pre_signed_urls=pre_signed_urls, all_files=batch_files)
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def compare_local_to_s3_checksums(project_name: str, divbase_base_url: str, all_files: list[Path]) -> dict[str, str]:
    """
    Checksum every local file about to be uploaded and compare against the
    checksums the server holds for the project's S3 bucket.

    This catches an attempt to upload an identical file twice and is only run
    when 'safe_mode' is enabled for uploads. It does NOT catch an identical
    object uploaded under a different name.

    Raises FilesAlreadyInProjectError if any file already exists in the
    bucket with an identical checksum.

    Returns a dict of file names to hex-encoded checksums for all files to be
    uploaded (including those not in S3); the server later uses these
    checksums to verify the uploads.
    """
    duplicates: dict[Path, str] = {}  # files already in S3 with an identical checksum
    local_checksums: dict[str, str] = {}  # checksums for every local file

    # The divbase server caps how many objects a single request may ask
    # about, so requests are batched.
    for start in range(0, len(all_files), MAX_S3_API_BATCH_SIZE):
        batch = all_files[start : start + MAX_S3_API_BATCH_SIZE]

        response = make_authenticated_request(
            method="POST",
            divbase_base_url=divbase_base_url,
            api_route=f"v1/s3/checksums?project_name={project_name}",
            json=[path.name for path in batch],
        )
        server_checksums = {
            parsed.object_name: parsed.md5_checksum
            for parsed in (FileChecksumResponse(**item) for item in response.json())
        }

        for path in batch:
            # Large files use S3's composite etag format; small files a plain MD5.
            if path.stat().st_size > S3_MULTIPART_UPLOAD_THRESHOLD:
                checksum = calculate_composite_md5_s3_etag(file_path=path)
            else:
                checksum = calculate_md5_checksum(file_path=path, output_format=MD5CheckSumFormat.HEX)

            local_checksums[path.name] = checksum
            # Local checksums are always non-empty strings, so a plain
            # equality check matches the original truthy-guarded comparison.
            if server_checksums.get(path.name) == checksum:
                duplicates[path] = checksum

    if duplicates:
        raise FilesAlreadyInProjectError(existing_files=duplicates, project_name=project_name)

    return local_checksums
|
divbase_cli/user_auth.py
CHANGED
|
@@ -10,11 +10,13 @@ from dataclasses import dataclass
|
|
|
10
10
|
from pathlib import Path
|
|
11
11
|
|
|
12
12
|
import httpx
|
|
13
|
+
import stamina
|
|
13
14
|
import yaml
|
|
14
15
|
from pydantic import SecretStr
|
|
15
16
|
|
|
16
17
|
from divbase_cli.cli_config import cli_settings
|
|
17
18
|
from divbase_cli.cli_exceptions import AuthenticationError, DivBaseAPIConnectionError, DivBaseAPIError
|
|
19
|
+
from divbase_cli.retries import retry_only_on_retryable_divbase_api_errors
|
|
18
20
|
from divbase_cli.user_config import load_user_config
|
|
19
21
|
from divbase_lib.api_schemas.auth import LogoutRequest
|
|
20
22
|
|
|
@@ -76,7 +78,8 @@ def check_existing_session(divbase_url: str, config) -> int | None:
|
|
|
76
78
|
return token_data.refresh_token_expires_at
|
|
77
79
|
|
|
78
80
|
|
|
79
|
-
|
|
81
|
+
@stamina.retry(on=retry_only_on_retryable_divbase_api_errors, attempts=3)
|
|
82
|
+
def login_to_divbase(email: str, password: SecretStr, divbase_url: str) -> None:
|
|
80
83
|
"""
|
|
81
84
|
Log in to the DivBase server and return user tokens.
|
|
82
85
|
"""
|
|
@@ -110,18 +113,16 @@ def login_to_divbase(email: str, password: SecretStr, divbase_url: str, config_p
|
|
|
110
113
|
)
|
|
111
114
|
token_data.dump_tokens()
|
|
112
115
|
|
|
113
|
-
config = load_user_config(
|
|
116
|
+
config = load_user_config()
|
|
114
117
|
config.set_login_status(url=divbase_url, email=email)
|
|
115
118
|
|
|
116
119
|
|
|
117
|
-
def logout_of_divbase(
|
|
118
|
-
token_path: Path = cli_settings.TOKENS_PATH, config_path: Path = cli_settings.CONFIG_PATH
|
|
119
|
-
) -> None:
|
|
120
|
+
def logout_of_divbase(token_path: Path = cli_settings.TOKENS_PATH) -> None:
|
|
120
121
|
"""
|
|
121
122
|
Log out of the DivBase server.
|
|
122
123
|
We send the refresh token to DivBase to be revoked server-side.
|
|
123
124
|
"""
|
|
124
|
-
config = load_user_config(
|
|
125
|
+
config = load_user_config()
|
|
125
126
|
|
|
126
127
|
# the "if" avoids raising an error on a non logged in user trying to logout
|
|
127
128
|
if config.logged_in_url:
|
|
@@ -182,6 +183,7 @@ def load_user_tokens(token_path: Path = cli_settings.TOKENS_PATH) -> TokenData:
|
|
|
182
183
|
)
|
|
183
184
|
|
|
184
185
|
|
|
186
|
+
@stamina.retry(on=retry_only_on_retryable_divbase_api_errors, attempts=3)
|
|
185
187
|
def make_authenticated_request(
|
|
186
188
|
method: str,
|
|
187
189
|
divbase_base_url: str,
|
|
@@ -235,13 +237,15 @@ def _refresh_access_token(token_data: TokenData, divbase_base_url: str) -> Token
|
|
|
235
237
|
Use the refresh token to get a new access token and update the token file.
|
|
236
238
|
|
|
237
239
|
Returns the new TokenData object which can be used immediately in a new request.
|
|
240
|
+
NOTE: We do not need retry logic inside this function as the calling function has it.
|
|
238
241
|
"""
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
"refresh_token": token_data.refresh_token.get_secret_value(),
|
|
243
|
-
|
|
244
|
-
|
|
242
|
+
try:
|
|
243
|
+
response = httpx.post(
|
|
244
|
+
url=f"{divbase_base_url}/v1/auth/refresh",
|
|
245
|
+
json={"refresh_token": token_data.refresh_token.get_secret_value()},
|
|
246
|
+
)
|
|
247
|
+
except httpx.HTTPError as e:
|
|
248
|
+
raise DivBaseAPIConnectionError() from e
|
|
245
249
|
|
|
246
250
|
# Possible if e.g. token revoked on server side.
|
|
247
251
|
if response.status_code == 401:
|
|
@@ -251,7 +255,16 @@ def _refresh_access_token(token_data: TokenData, divbase_base_url: str) -> Token
|
|
|
251
255
|
config.set_login_status(url=None, email=None)
|
|
252
256
|
raise AuthenticationError(LOGIN_AGAIN_MESSAGE)
|
|
253
257
|
|
|
254
|
-
|
|
258
|
+
try:
|
|
259
|
+
response.raise_for_status()
|
|
260
|
+
except httpx.HTTPStatusError as e:
|
|
261
|
+
raise DivBaseAPIError(
|
|
262
|
+
error_details=response.json().get("detail", "No error details provided"),
|
|
263
|
+
status_code=response.status_code,
|
|
264
|
+
error_type=response.json().get("type", "unknown"),
|
|
265
|
+
http_method="POST",
|
|
266
|
+
url=f"{divbase_base_url}/v1/auth/refresh",
|
|
267
|
+
) from e
|
|
255
268
|
data = response.json()
|
|
256
269
|
|
|
257
270
|
new_token_data = TokenData(
|
divbase_cli/user_config.py
CHANGED
|
@@ -1,9 +1,13 @@
|
|
|
1
1
|
"""
|
|
2
2
|
Handles the user's configuration file for the divbase-cli package.
|
|
3
|
-
|
|
4
|
-
|
|
3
|
+
The user configuration is stored in a local file.
|
|
4
|
+
|
|
5
|
+
The path to the config file is determined by `cli_settings.CONFIG_PATH` and
|
|
6
|
+
is stored in a platform-specific user configuration directory (for example,
|
|
7
|
+
under the user application data directory on Linux, macOS, or Windows).
|
|
5
8
|
"""
|
|
6
9
|
|
|
10
|
+
import logging
|
|
7
11
|
import warnings
|
|
8
12
|
from dataclasses import dataclass, field
|
|
9
13
|
from pathlib import Path
|
|
@@ -11,7 +15,9 @@ from pathlib import Path
|
|
|
11
15
|
import yaml
|
|
12
16
|
|
|
13
17
|
from divbase_cli.cli_config import cli_settings
|
|
14
|
-
from divbase_cli.cli_exceptions import
|
|
18
|
+
from divbase_cli.cli_exceptions import ProjectNotInConfigError
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
15
21
|
|
|
16
22
|
|
|
17
23
|
@dataclass
|
|
@@ -136,18 +142,21 @@ class UserConfig:
|
|
|
136
142
|
self.dump_config()
|
|
137
143
|
|
|
138
144
|
|
|
139
|
-
def load_user_config(
|
|
145
|
+
def load_user_config() -> UserConfig:
|
|
140
146
|
"""Helper function to load the user config file"""
|
|
141
147
|
try:
|
|
142
|
-
with open(
|
|
148
|
+
with open(cli_settings.CONFIG_PATH, "r") as file:
|
|
143
149
|
config_contents = yaml.safe_load(file)
|
|
144
150
|
except FileNotFoundError:
|
|
145
|
-
|
|
151
|
+
logger.debug(f"No existing config file found at {cli_settings.CONFIG_PATH}, creating a new one.")
|
|
152
|
+
create_user_config(config_path=cli_settings.CONFIG_PATH)
|
|
153
|
+
with open(cli_settings.CONFIG_PATH, "r") as file:
|
|
154
|
+
config_contents = yaml.safe_load(file)
|
|
146
155
|
|
|
147
156
|
projects = [ProjectConfig(**project) for project in config_contents.get("projects", [])]
|
|
148
157
|
|
|
149
158
|
return UserConfig(
|
|
150
|
-
config_path=
|
|
159
|
+
config_path=cli_settings.CONFIG_PATH,
|
|
151
160
|
logged_in_url=config_contents.get("logged_in_url"),
|
|
152
161
|
logged_in_email=config_contents.get("logged_in_email"),
|
|
153
162
|
projects=projects,
|
|
@@ -159,8 +168,10 @@ def load_user_config(config_path: Path = cli_settings.CONFIG_PATH) -> UserConfig
|
|
|
159
168
|
def create_user_config(config_path: Path) -> None:
|
|
160
169
|
"""Create a user configuration file at the specified path."""
|
|
161
170
|
if config_path.exists():
|
|
162
|
-
raise FileExistsError(
|
|
163
|
-
|
|
171
|
+
raise FileExistsError(
|
|
172
|
+
f"Config file already exists at {config_path}. Stopping to avoid overwriting existing config."
|
|
173
|
+
)
|
|
174
|
+
config_path.parent.mkdir(parents=True, exist_ok=True)
|
|
164
175
|
|
|
165
176
|
user_config = UserConfig(config_path=config_path, projects=[])
|
|
166
177
|
user_config.dump_config()
|
divbase_cli/utils.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""Collection of utility functions for divbase-cli package that haven't found a better home"""
|
|
2
|
+
|
|
3
|
+
import csv
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
from rich.table import Table
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def format_file_size(size_bytes: int | float | None) -> str:
|
|
10
|
+
"""
|
|
11
|
+
Converts a file size in bytes to a human-readable format.
|
|
12
|
+
|
|
13
|
+
Uses powers of 1000 so KB, MB, GB, TB and not 1024 KiB, MiB, GiB, TiB.
|
|
14
|
+
"""
|
|
15
|
+
if size_bytes is None:
|
|
16
|
+
return "N/A"
|
|
17
|
+
if size_bytes == 0:
|
|
18
|
+
return "0 B"
|
|
19
|
+
power = 1000
|
|
20
|
+
n = 0
|
|
21
|
+
power_labels = {0: "", 1: "K", 2: "M", 3: "G", 4: "T"}
|
|
22
|
+
while size_bytes >= power and n < len(power_labels) - 1:
|
|
23
|
+
size_bytes /= power
|
|
24
|
+
n += 1
|
|
25
|
+
return f"{size_bytes:.2f} {power_labels[n]}B"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def print_rich_table_as_tsv(table: Table) -> None:
    """
    Helper function to print a rich Table as a TSV file to standard output.

    This is useful for CLI commands that want to offer both rich table output
    for human users as well as TSV output for programmatic parsing.

    NOTE: This function expects all table rows to be of same length (you can
    have None values in cells).
    """
    writer = csv.writer(sys.stdout, delimiter="\t")

    headers = [str(col.header) for col in table.columns]
    writer.writerow(headers)

    # NOTE(review): `_cells` is a private attribute of rich's Column class —
    # confirm it remains available when bumping the rich dependency.
    columns_data = [col._cells for col in table.columns]

    # default=0 prevents min() raising ValueError for a table with no columns,
    # in which case only the (empty) header row is emitted.
    num_rows = min((len(cells) for cells in columns_data), default=0)
    for row_index in range(num_rows):
        row = [str(columns_data[col_index][row_index]) for col_index in range(len(columns_data))]
        writer.writerow(row)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: divbase-cli
|
|
3
|
-
Version: 0.1.0.
|
|
3
|
+
Version: 0.1.0.dev3
|
|
4
4
|
Summary: Command Line Interface for Divbase
|
|
5
5
|
Project-URL: Homepage, https://divbase.scilifelab.se
|
|
6
6
|
Project-URL: Documentation, https://scilifelabdatacentre.github.io/divbase
|
|
@@ -18,8 +18,9 @@ Classifier: Programming Language :: Python :: 3.12
|
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.13
|
|
19
19
|
Classifier: Programming Language :: Python :: 3.14
|
|
20
20
|
Requires-Python: >=3.12
|
|
21
|
-
Requires-Dist: divbase-lib==0.1.0.
|
|
22
|
-
Requires-Dist: httpx<
|
|
21
|
+
Requires-Dist: divbase-lib==0.1.0.dev3
|
|
22
|
+
Requires-Dist: httpx<1,>=0.28.1
|
|
23
|
+
Requires-Dist: stamina>=25.2.0
|
|
23
24
|
Requires-Dist: typer<1,>=0.21.1
|
|
24
25
|
Description-Content-Type: text/markdown
|
|
25
26
|
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
divbase_cli/__init__.py,sha256=jQHG8OW4TlfIzPKW4IrX9q58EfEf9pDfm2YgO0ydKaA,27
|
|
2
|
+
divbase_cli/cli_config.py,sha256=0h40kFmWK03-Gi7qU9S_GtpGT5XpsZVHSxLH127Elkk,1810
|
|
3
|
+
divbase_cli/cli_exceptions.py,sha256=MeSignmkzTidzlhRDdbUBHkbxD6qOWUYtTRUOfDVz0w,6501
|
|
4
|
+
divbase_cli/config_resolver.py,sha256=olnt5U4HtlYKcTS4zGDV1KyIgLcrOxyY4q87T7lx65U,3004
|
|
5
|
+
divbase_cli/display_task_history.py,sha256=k9V4YeWsqv6HTZn2Ns6sl4vngiWANFiPPj7E5kE0_K4,7628
|
|
6
|
+
divbase_cli/divbase_cli.py,sha256=JrUvPB925gJHroNXez4uSNehKHKfDCGMLzpyipHkGKY,2358
|
|
7
|
+
divbase_cli/retries.py,sha256=UFv4CHudC1Sb0kRe2HuuCidoCNA9TSXB-mOdrt4uDew,1298
|
|
8
|
+
divbase_cli/user_auth.py,sha256=0CbwED1xL7TgVdA58_x6Vq4Tpvaq8PwE3Pw5LwrVy6I,10308
|
|
9
|
+
divbase_cli/user_config.py,sha256=oROFuNZN5MoC01070ufZbVh8lwavFN2bTj6u4oy7MYE,6350
|
|
10
|
+
divbase_cli/utils.py,sha256=H31UK32jeg21wJlKpFsPry6D1XJlpzOuDgOZIDanKsI,1512
|
|
11
|
+
divbase_cli/cli_commands/__init__.py,sha256=K_2r8V1QGpEmxDcD2QOyWlXR4HPoc16yytmZwGkIyLw,166
|
|
12
|
+
divbase_cli/cli_commands/auth_cli.py,sha256=IbuQGFvljvn8uezGFu8q0H9v62IKKzFsdpTwmehoriU,2871
|
|
13
|
+
divbase_cli/cli_commands/dimensions_cli.py,sha256=zSWTupp0fx-AGSobKlkSpNNGlv-o62EAe8yarVbmF0k,4875
|
|
14
|
+
divbase_cli/cli_commands/file_cli.py,sha256=6DIPilDtEQBYIoVy6wkjWOgSDh12cuqHqL5duHtVB7U,18772
|
|
15
|
+
divbase_cli/cli_commands/query_cli.py,sha256=Hty7BzrBTV0e8P0OCenlxJm-S7EO9LOscjFhOETjPvw,5551
|
|
16
|
+
divbase_cli/cli_commands/shared_args_options.py,sha256=NNFUp0wRPMUpdtmvgmF_e3loKWbUlSL5uuPCSpJMylk,548
|
|
17
|
+
divbase_cli/cli_commands/task_history_cli.py,sha256=fBKyPzzsOBDf9DC-Uice00wy_q-QqMid0pBXcErrO7A,4453
|
|
18
|
+
divbase_cli/cli_commands/user_config_cli.py,sha256=WB1iPPuecEdYmUBxnMQYTT1E3LC5FzU62LF4WUp-lJE,5396
|
|
19
|
+
divbase_cli/cli_commands/version_cli.py,sha256=UKT-t2MvFwFNRPgSldxTsFvwPiS8EM5vSerXRCmibsw,6073
|
|
20
|
+
divbase_cli/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
21
|
+
divbase_cli/services/pre_signed_urls.py,sha256=7evDg2aeWN609VJnlcGCqZdk7M2yg_6VKUsCYIAt0ME,16479
|
|
22
|
+
divbase_cli/services/project_versions.py,sha256=RzuexQ7UmcoeZTnV7LSU9AehHnDpVVqxKJ5G8t_njBM,2865
|
|
23
|
+
divbase_cli/services/s3_files.py,sha256=lURk4m7MyPBzDz0puQO72i8djC25WmIWzVf1oY9nJwA,14470
|
|
24
|
+
divbase_cli-0.1.0.dev3.dist-info/METADATA,sha256=whCWvjLzGCtDuTPOqE2pIsMyo9LCXPGcEWT8LZO8VYk,1421
|
|
25
|
+
divbase_cli-0.1.0.dev3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
26
|
+
divbase_cli-0.1.0.dev3.dist-info/entry_points.txt,sha256=vaRJvvGmfesTaMMCCy3kcBhYzf51wZoEPuLqas5LDMg,100
|
|
27
|
+
divbase_cli-0.1.0.dev3.dist-info/RECORD,,
|