divbase-cli 0.1.0.dev2__py3-none-any.whl → 0.1.0.dev3__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- divbase_cli/__init__.py +1 -1
- divbase_cli/cli_commands/auth_cli.py +4 -9
- divbase_cli/cli_commands/dimensions_cli.py +4 -8
- divbase_cli/cli_commands/file_cli.py +284 -70
- divbase_cli/cli_commands/query_cli.py +3 -7
- divbase_cli/cli_commands/shared_args_options.py +20 -0
- divbase_cli/cli_commands/task_history_cli.py +3 -8
- divbase_cli/cli_commands/user_config_cli.py +14 -44
- divbase_cli/cli_commands/version_cli.py +16 -24
- divbase_cli/cli_config.py +18 -7
- divbase_cli/cli_exceptions.py +37 -22
- divbase_cli/config_resolver.py +10 -10
- divbase_cli/divbase_cli.py +1 -1
- divbase_cli/retries.py +34 -0
- divbase_cli/services/__init__.py +0 -0
- divbase_cli/services/pre_signed_urls.py +446 -0
- divbase_cli/services/project_versions.py +77 -0
- divbase_cli/services/s3_files.py +355 -0
- divbase_cli/user_auth.py +26 -13
- divbase_cli/user_config.py +20 -9
- divbase_cli/utils.py +47 -0
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/METADATA +4 -3
- divbase_cli-0.1.0.dev3.dist-info/RECORD +27 -0
- divbase_cli/pre_signed_urls.py +0 -169
- divbase_cli/services.py +0 -219
- divbase_cli-0.1.0.dev2.dist-info/RECORD +0 -22
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/WHEEL +0 -0
- {divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/entry_points.txt +0 -0
divbase_cli/pre_signed_urls.py
DELETED
@@ -1,169 +0,0 @@
-"""
-Module responsible for taking pre-signed urls and using them to do file download and upload.
-
-TODO: Consider adding retries, error handling, progress bars, etc.
-"""
-
-import logging
-from dataclasses import dataclass
-from pathlib import Path
-
-import httpx
-
-from divbase_lib.api_schemas.s3 import PreSignedDownloadResponse, PreSignedUploadResponse
-from divbase_lib.exceptions import ChecksumVerificationError
-from divbase_lib.s3_checksums import verify_downloaded_checksum
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class SuccessfulDownload:
-    """Represents a successfully downloaded file."""
-
-    file_path: Path
-    object_name: str
-
-
-@dataclass
-class FailedDownload:
-    """Represents a failed download attempt."""
-
-    object_name: str
-    file_path: Path
-    exception: Exception
-
-
-@dataclass
-class DownloadOutcome:
-    """Outcome of attempting to download multiple files."""
-
-    successful: list[SuccessfulDownload]
-    failed: list[FailedDownload]
-
-
-def download_multiple_pre_signed_urls(
-    pre_signed_urls: list[PreSignedDownloadResponse], verify_checksums: bool, download_dir: Path
-) -> DownloadOutcome:
-    """
-    Download files using pre-signed URLs.
-    Returns a DownloadResults object containing all successful and failed downloads.
-    """
-    successful_downloads, failed_downloads = [], []
-    with httpx.Client(timeout=30.0) as client:
-        for obj in pre_signed_urls:
-            result = _download_single_pre_signed_url(
-                httpx_client=client,
-                pre_signed_url=obj.pre_signed_url,
-                verify_checksums=verify_checksums,
-                output_file_path=download_dir / obj.name,
-                object_name=obj.name,
-            )
-            if isinstance(result, SuccessfulDownload):
-                successful_downloads.append(result)
-            else:
-                failed_downloads.append(result)
-
-    return DownloadOutcome(successful=successful_downloads, failed=failed_downloads)
-
-
-def _download_single_pre_signed_url(
-    httpx_client: httpx.Client, pre_signed_url: str, verify_checksums: bool, output_file_path: Path, object_name: str
-) -> SuccessfulDownload | FailedDownload:
-    """
-    Download a single file using a pre-signed URL.
-    Helper function, do not call directly from outside this module.
-    """
-    with httpx_client.stream("GET", pre_signed_url) as response:
-        try:
-            response.raise_for_status()
-        except httpx.HTTPError as err:
-            return FailedDownload(object_name=object_name, file_path=output_file_path, exception=err)
-
-        server_checksum = response.headers.get("ETag", "").strip('"')
-
-        with open(output_file_path, "wb") as file:
-            for chunk in response.iter_bytes(chunk_size=8192):
-                file.write(chunk)
-
-    if verify_checksums:
-        try:
-            verify_downloaded_checksum(file_path=output_file_path, expected_checksum=server_checksum)
-        except ChecksumVerificationError as err:
-            return FailedDownload(object_name=object_name, file_path=output_file_path, exception=err)
-
-    return SuccessfulDownload(file_path=output_file_path, object_name=object_name)
-
-
-@dataclass
-class SuccessfulUpload:
-    """Represents a successfully uploaded file."""
-
-    file_path: Path
-    object_name: str
-
-
-@dataclass
-class FailedUpload:
-    """Represents a failed upload attempt."""
-
-    object_name: str
-    file_path: Path
-    exception: Exception
-
-
-@dataclass
-class UploadOutcome:
-    """Outcome of attempting to upload multiple files."""
-
-    successful: list[SuccessfulUpload]
-    failed: list[FailedUpload]
-
-
-def upload_multiple_pre_signed_urls(
-    pre_signed_urls: list[PreSignedUploadResponse], all_files: list[Path]
-) -> UploadOutcome:
-    """
-    Upload files using pre-signed PUT URLs.
-    Returns a UploadResults object containing the results of the upload attempts.
-    """
-    file_map = {file.name: file for file in all_files}
-
-    successful_uploads, failed_uploads = [], []
-    with httpx.Client(timeout=30.0) as client:
-        for obj in pre_signed_urls:
-            result = _upload_single_pre_signed_url(
-                httpx_client=client,
-                pre_signed_url=obj.pre_signed_url,
-                file_path=file_map[obj.name],
-                object_name=obj.name,
-                headers=obj.put_headers,
-            )
-
-            if isinstance(result, SuccessfulUpload):
-                successful_uploads.append(result)
-            else:
-                failed_uploads.append(result)
-
-    return UploadOutcome(successful=successful_uploads, failed=failed_uploads)
-
-
-def _upload_single_pre_signed_url(
-    httpx_client: httpx.Client,
-    pre_signed_url: str,
-    file_path: Path,
-    object_name: str,
-    headers: dict[str, str],
-) -> SuccessfulUpload | FailedUpload:
-    """
-    Upload a single file using a pre-signed PUT URL.
-    Helper function, do not call directly from outside this module.
-    """
-    with open(file_path, "rb") as file:
-        try:
-            response = httpx_client.put(pre_signed_url, content=file, headers=headers)
-            response.raise_for_status()
-        except httpx.HTTPError as err:
-            return FailedUpload(object_name=object_name, file_path=file_path, exception=err)
-
-    return SuccessfulUpload(file_path=file_path, object_name=object_name)
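For context, a minimal usage sketch of the module deleted above (not taken from the package itself). It assumes divbase-cli 0.1.0.dev2 and divbase-lib are installed and that the `PreSignedDownloadResponse` schema accepts just `name` and `pre_signed_url`; the URL and object name are placeholders. In the real CLI these response objects come from the DivBase server (see `services.py` below), not hand-built.

```python
from pathlib import Path

from divbase_cli.pre_signed_urls import download_multiple_pre_signed_urls
from divbase_lib.api_schemas.s3 import PreSignedDownloadResponse

# Placeholder pre-signed URL and object name for illustration only.
urls = [
    PreSignedDownloadResponse(
        name="example.vcf.gz",
        pre_signed_url="https://s3.example.org/bucket/example.vcf.gz?X-Amz-Signature=...",
    )
]

# Stream each URL to ./downloads and compare the file's checksum against the ETag header.
outcome = download_multiple_pre_signed_urls(
    pre_signed_urls=urls,
    verify_checksums=True,
    download_dir=Path("./downloads"),
)

for ok in outcome.successful:
    print(f"downloaded {ok.object_name} -> {ok.file_path}")
for bad in outcome.failed:
    print(f"failed {bad.object_name}: {bad.exception}")
```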
divbase_cli/services.py
DELETED
@@ -1,219 +0,0 @@
-"""
-Service layer for DivBase CLI project version and S3 file operations.
-"""
-
-from pathlib import Path
-
-from divbase_cli.cli_exceptions import (
-    FileDoesNotExistInSpecifiedVersionError,
-    FilesAlreadyInProjectError,
-)
-from divbase_cli.pre_signed_urls import (
-    DownloadOutcome,
-    UploadOutcome,
-    download_multiple_pre_signed_urls,
-    upload_multiple_pre_signed_urls,
-)
-from divbase_cli.user_auth import make_authenticated_request
-from divbase_lib.api_schemas.project_versions import (
-    AddVersionRequest,
-    AddVersionResponse,
-    DeleteVersionRequest,
-    DeleteVersionResponse,
-    ProjectVersionDetailResponse,
-    ProjectVersionInfo,
-)
-from divbase_lib.api_schemas.s3 import ExistingFileResponse, PreSignedDownloadResponse, PreSignedUploadResponse
-from divbase_lib.s3_checksums import MD5CheckSumFormat, calculate_md5_checksum, convert_checksum_hex_to_base64
-
-
-def add_version_command(project_name: str, divbase_base_url: str, name: str, description: str) -> AddVersionResponse:
-    """Add a new version to the project versions table stored on the divbase server"""
-    request_data = AddVersionRequest(name=name, description=description)
-
-    response = make_authenticated_request(
-        method="PATCH",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/project-versions/add?project_name={project_name}",
-        json=request_data.model_dump(),
-    )
-
-    return AddVersionResponse(**response.json())
-
-
-def list_versions_command(project_name: str, include_deleted: bool, divbase_base_url: str) -> list[ProjectVersionInfo]:
-    """
-    List all versions in the project versions table stored on the divbase server.
-    Returns a dict of version names (keys) to details about the versions.
-    """
-    response = make_authenticated_request(
-        method="GET",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/project-versions/list?project_name={project_name}&include_deleted={str(include_deleted).lower()}",
-    )
-
-    project_versions = []
-    response_data = response.json()
-    for version in response_data:
-        project_versions.append(ProjectVersionInfo(**version))
-
-    return project_versions
-
-
-def get_version_details_command(
-    project_name: str, divbase_base_url: str, version_name: str
-) -> ProjectVersionDetailResponse:
-    """Get details about a specific project version, including all files and their version IDs at that version."""
-    response = make_authenticated_request(
-        method="GET",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/project-versions/version_details?project_name={project_name}&version_name={version_name}",
-    )
-
-    return ProjectVersionDetailResponse(**response.json())
-
-
-def delete_version_command(project_name: str, divbase_base_url: str, version_name: str) -> DeleteVersionResponse:
-    """
-    Delete a version from the project versions table stored on the divbase server.
-    This marks the version as (soft) deleted server side,
-    and it will eventually be permanently deleted (after some grace period).
-    """
-    request_data = DeleteVersionRequest(version_name=version_name)
-
-    response = make_authenticated_request(
-        method="DELETE",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/project-versions/delete?project_name={project_name}",
-        json=request_data.model_dump(),
-    )
-
-    return DeleteVersionResponse(**response.json())
-
-
-def list_files_command(divbase_base_url: str, project_name: str) -> list[str]:
-    """List all files in a project."""
-    response = make_authenticated_request(
-        method="GET",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/s3/?project_name={project_name}",
-    )
-
-    return response.json()
-
-
-def download_files_command(
-    divbase_base_url: str,
-    project_name: str,
-    all_files: list[str],
-    download_dir: Path,
-    verify_checksums: bool,
-    project_version: str | None = None,
-) -> DownloadOutcome:
-    """
-    Download files from the given project's S3 bucket.
-    """
-    if not download_dir.is_dir():
-        raise NotADirectoryError(
-            f"The specified download directory '{download_dir}' is not a directory. Please create it or specify a valid directory before continuing."
-        )
-
-    if project_version:
-        project_version_details = get_version_details_command(
-            project_name=project_name, divbase_base_url=divbase_base_url, version_name=project_version
-        )
-
-        # check if all files specified exist for download exist at this project version
-        missing_objects = [f for f in all_files if f not in project_version_details.files]
-        if missing_objects:
-            raise FileDoesNotExistInSpecifiedVersionError(
-                project_name=project_name,
-                project_version=project_version,
-                missing_files=missing_objects,
-            )
-        to_download = {file: project_version_details.files[file] for file in all_files}
-        json_data = [{"name": obj, "version_id": to_download[obj]} for obj in all_files]
-    else:
-        json_data = [{"name": obj, "version_id": None} for obj in all_files]
-
-    response = make_authenticated_request(
-        method="POST",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/s3/download?project_name={project_name}",
-        json=json_data,
-    )
-    pre_signed_urls = [PreSignedDownloadResponse(**item) for item in response.json()]
-
-    download_results = download_multiple_pre_signed_urls(
-        pre_signed_urls=pre_signed_urls, download_dir=download_dir, verify_checksums=verify_checksums
-    )
-    return download_results
-
-
-def upload_files_command(
-    project_name: str, divbase_base_url: str, all_files: list[Path], safe_mode: bool
-) -> UploadOutcome:
-    """
-    Upload files to the project's S3 bucket.
-    Returns an UploadOutcome object containing details of which files were successfully uploaded and which failed.
-
-    - Safe mode:
-    1. checks if any of the files that are to be uploaded already exist in the bucket (by comparing checksums)
-    2. Adds checksum to upload request to allow server to verify upload.
-    """
-    file_checksums_hex = {}
-    if safe_mode:
-        for file in all_files:
-            file_checksums_hex[file.name] = calculate_md5_checksum(file_path=file, output_format=MD5CheckSumFormat.HEX)
-
-        files_to_check = []
-        for file in all_files:
-            files_to_check.append({"object_name": file.name, "md5_checksum": file_checksums_hex[file.name]})
-
-        response = make_authenticated_request(
-            method="POST",
-            divbase_base_url=divbase_base_url,
-            api_route=f"v1/s3/check-exists?project_name={project_name}",
-            json=files_to_check,
-        )
-        existing_files = response.json()
-
-        if existing_files:
-            existing_object_names = [ExistingFileResponse(**file) for file in existing_files]
-            raise FilesAlreadyInProjectError(existing_files=existing_object_names, project_name=project_name)
-
-    objects_to_upload = []
-    for file in all_files:
-        upload_object = {
-            "name": file.name,
-            "content_length": file.stat().st_size,
-        }
-        if safe_mode:
-            hex_checksum = file_checksums_hex[file.name]
-            base64_checksum = convert_checksum_hex_to_base64(hex_checksum)
-            upload_object["md5_hash"] = base64_checksum
-
-        objects_to_upload.append(upload_object)
-
-    response = make_authenticated_request(
-        method="POST",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/s3/upload?project_name={project_name}",
-        json=objects_to_upload,
-    )
-    pre_signed_urls = [PreSignedUploadResponse(**item) for item in response.json()]
-    return upload_multiple_pre_signed_urls(pre_signed_urls=pre_signed_urls, all_files=all_files)
-
-
-def soft_delete_objects_command(divbase_base_url: str, project_name: str, all_files: list[str]) -> list[str]:
-    """
-    Soft delete objects from the project's bucket.
-    Returns a list of the soft deleted objects
-    """
-    response = make_authenticated_request(
-        method="DELETE",
-        divbase_base_url=divbase_base_url,
-        api_route=f"v1/s3/?project_name={project_name}",
-        json=all_files,
-    )
-    return response.json()
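For context, a minimal sketch of how this service layer was driven before the refactor (not taken from the package). The project name, server URL, and file paths are placeholders, and a stored login token (handled by divbase_cli.user_auth) is assumed.

```python
from pathlib import Path

from divbase_cli.cli_exceptions import FilesAlreadyInProjectError
from divbase_cli.services import upload_files_command

# Placeholder inputs for illustration only.
files = [Path("sample_A.vcf.gz"), Path("sample_B.vcf.gz")]

try:
    outcome = upload_files_command(
        project_name="my-project",
        divbase_base_url="https://divbase.example.org",
        all_files=files,
        safe_mode=True,  # pre-compute MD5 checksums and ask the server which files already exist
    )
except FilesAlreadyInProjectError as err:
    # If the server reports duplicates in safe mode, the whole upload is aborted before any PUTs.
    print(err)
else:
    print(f"uploaded: {[u.object_name for u in outcome.successful]}")
    print(f"failed:   {[f.object_name for f in outcome.failed]}")
```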
divbase_cli-0.1.0.dev2.dist-info/RECORD
DELETED
@@ -1,22 +0,0 @@
-divbase_cli/__init__.py,sha256=njbWM3TEZYvcuYvwka0xuGnHkcz7YX1ZvGC9gdbzzkI,27
-divbase_cli/cli_config.py,sha256=zMAFg-GWdrBIgJicVYDms-UhipPIqQO2z0Q2-GNmwc4,1541
-divbase_cli/cli_exceptions.py,sha256=lzKVKhkWEtK-GPFwM5ADvrtkRPwAVmaOeY5IvBZgiLo,5555
-divbase_cli/config_resolver.py,sha256=Yd4JCa1Hul4oJFTBeSRdaPJzmYZ8-7iu5v7Q8Wdl6Kg,3159
-divbase_cli/display_task_history.py,sha256=k9V4YeWsqv6HTZn2Ns6sl4vngiWANFiPPj7E5kE0_K4,7628
-divbase_cli/divbase_cli.py,sha256=16Qa0p8NB_vQVO3yl9PQQ7BG85zHgzSZ_hnzRK-nrNo,2358
-divbase_cli/pre_signed_urls.py,sha256=ypQixtG03xEhMa3R_RpVUQ4thUaAJ1sIqRUy9HO35pc,5312
-divbase_cli/services.py,sha256=CTkadqDqg5XbEJJRdfG5QPIQehK3iWIrFje03oRYNJE,8286
-divbase_cli/user_auth.py,sha256=N3niQaU-4xIMqIHAwovp0h4wKHkjebU_u3q4KODBJfM,9603
-divbase_cli/user_config.py,sha256=tXsRXuJYkSwTvY89dzC3JF4Aw-6kp87rPrWYdmbsRYk,5884
-divbase_cli/cli_commands/__init__.py,sha256=K_2r8V1QGpEmxDcD2QOyWlXR4HPoc16yytmZwGkIyLw,166
-divbase_cli/cli_commands/auth_cli.py,sha256=SQUAU-34HYcCA7Qenbrtq454GoaQM3SMxXXCx0uMctk,3104
-divbase_cli/cli_commands/dimensions_cli.py,sha256=JPeQrFY6eBXmIXu5DsmOKI27m40KytRbS-kZAXd7dB0,5104
-divbase_cli/cli_commands/file_cli.py,sha256=W4SsEQE87gD1UqYMqI44B3iBgmuTD4Xx72xeWtDhxOY,10038
-divbase_cli/cli_commands/query_cli.py,sha256=-ncdyFkHTSblToWLlKjh6GEdBfo57sZcJCZLvHLSG0w,5778
-divbase_cli/cli_commands/task_history_cli.py,sha256=2A46RCST8iKWNaOjubHo7HxYcExxXkWk_fdG40sEC4U,4715
-divbase_cli/cli_commands/user_config_cli.py,sha256=eRw5bHDBuSEEjQQPZhI3SseplK4elvAvrtkIEMHdnIY,6463
-divbase_cli/cli_commands/version_cli.py,sha256=GnHsgLWGi7xOraFatAUnVBgUWdeYyQZ2C6qjpO9WTJ0,6469
-divbase_cli-0.1.0.dev2.dist-info/METADATA,sha256=MsUISyphFyqD0SGSdO_Q3UDY5FhDlG-wLYnLcPJtEmo,1390
-divbase_cli-0.1.0.dev2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-divbase_cli-0.1.0.dev2.dist-info/entry_points.txt,sha256=vaRJvvGmfesTaMMCCy3kcBhYzf51wZoEPuLqas5LDMg,100
-divbase_cli-0.1.0.dev2.dist-info/RECORD,,
{divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/WHEEL
File without changes

{divbase_cli-0.1.0.dev2.dist-info → divbase_cli-0.1.0.dev3.dist-info}/entry_points.txt
File without changes