kleinkram 0.13.1__tar.gz → 0.13.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kleinkram has been flagged as potentially problematic; see the registry listing for details.
- {kleinkram-0.13.1 → kleinkram-0.13.2}/PKG-INFO +1 -1
- {kleinkram-0.13.1 → kleinkram-0.13.2}/pyproject.toml +1 -1
- {kleinkram-0.13.1 → kleinkram-0.13.2}/requirements.txt +3 -1
- kleinkram-0.13.2/src/kleinkram/helper.py +169 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/main.py +10 -8
- kleinkram-0.13.1/src/kleinkram/helper.py +0 -74
- {kleinkram-0.13.1 → kleinkram-0.13.2}/.gitignore +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/LICENSE +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/README.md +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/deploy.sh +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/dev.sh +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/klein.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/__init__.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/api_client.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/auth/auth.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/consts.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/endpoint/endpoint.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/error_handling.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/file/file.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/mission/mission.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/project/project.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/queue/queue.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/tag/tag.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/topic/topic.py +0 -0
- {kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/user/user.py +0 -0
kleinkram-0.13.2/src/kleinkram/helper.py (new file)
@@ -0,0 +1,169 @@
+import glob
+import os
+import queue
+import threading
+from functools import partial
+from typing import Dict
+import boto3
+
+import tqdm
+from boto3.s3.transfer import TransferConfig
+from botocore.client import BaseClient
+from rich import print
+
+from kleinkram.api_client import AuthenticatedClient
+
+
+class TransferCallback:
+    """
+    Handle callbacks from the transfer manager.
+
+    The transfer manager periodically calls the __call__ method throughout
+    the upload process so that it can take action, such as displaying progress
+    to the user and collecting data about the transfer.
+    """
+
+    def __init__(self):
+        """
+        Initialize the TransferCallback.
+
+        This initializes an empty dictionary to hold progress bars for each file.
+        """
+        self._lock = threading.Lock()
+        self.file_progress = {}
+
+    def add_file(self, file_id, target_size):
+        """
+        Add a new file to track.
+
+        :param file_id: A unique identifier for the file (e.g., file name or ID).
+        :param target_size: The total size of the file being transferred.
+        """
+        with self._lock:
+            tqdm_instance = tqdm.tqdm(
+                total=target_size,
+                unit="B",
+                unit_scale=True,
+                desc=f"Uploading {file_id}",
+            )
+            self.file_progress[file_id] = {
+                "tqdm": tqdm_instance,
+                "total_transferred": 0,
+            }
+
+    def __call__(self, file_id, bytes_transferred):
+        """
+        The callback method that is called by the transfer manager.
+
+        Display progress during file transfer and collect per-thread transfer
+        data. This method can be called by multiple threads, so shared instance
+        data is protected by a thread lock.
+
+        :param file_id: The identifier of the file being transferred.
+        :param bytes_transferred: The number of bytes transferred in this call.
+        """
+        with self._lock:
+            if file_id in self.file_progress:
+                progress = self.file_progress[file_id]
+                progress["total_transferred"] += bytes_transferred
+
+                # Update tqdm progress bar
+                progress["tqdm"].update(bytes_transferred)
+
+    def close(self):
+        """Close all tqdm progress bars."""
+        with self._lock:
+            for progress in self.file_progress.values():
+                progress["tqdm"].close()
+
+
+def create_transfer_callback(callback_instance, file_id):
+    """
+    Factory function to create a partial function for TransferCallback.
+    :param callback_instance: Instance of TransferCallback.
+    :param file_id: The unique identifier for the file.
+    :return: A callable that can be passed as a callback to boto3's upload_file method.
+    """
+    return partial(callback_instance.__call__, file_id)
+
+
+def expand_and_match(path_pattern):
+    expanded_path = os.path.expanduser(path_pattern)
+    expanded_path = os.path.expandvars(expanded_path)
+
+    normalized_path = os.path.normpath(expanded_path)
+
+    if "**" in normalized_path:
+        file_list = glob.glob(normalized_path, recursive=True)
+    else:
+        file_list = glob.glob(normalized_path)
+
+    return file_list
+
+
+def uploadFiles(files: Dict[str, str], credentials: Dict[str, str], nrThreads: int):
+    client = AuthenticatedClient()
+
+    session = boto3.Session(
+        aws_access_key_id=credentials["accessKey"],
+        aws_secret_access_key=credentials["secretKey"],
+        aws_session_token=credentials["sessionToken"],
+    )
+    api_endpoint = client.tokenfile.endpoint
+    if api_endpoint == "http://localhost:3000":
+        minio_endpoint = "http://localhost:9000"
+    else:
+        minio_endpoint = api_endpoint.replace("api", "minio")
+    s3 = session.resource("s3", endpoint_url=minio_endpoint)
+
+    _queue = queue.Queue()
+    for file in files.items():
+        _queue.put(file)
+    threads = []
+    transferCallback = TransferCallback()
+
+    for i in range(nrThreads):
+        thread = threading.Thread(
+            target=uploadFile, args=(_queue, s3, transferCallback)
+        )
+        thread.start()
+        threads.append(thread)
+    for thread in threads:
+        thread.join()
+
+
+def uploadFile(_queue: queue.Queue, s3: BaseClient, transferCallback: TransferCallback):
+    while True:
+        try:
+            filename, _file = _queue.get(timeout=3)
+            queueUUID = _file["queueUUID"]
+            filepath = _file["filepath"]
+            bucket = _file["bucket"]
+            target_location = _file["location"]
+            config = TransferConfig(
+                multipart_chunksize=10 * 1024 * 1024, max_concurrency=5
+            )
+            with open(filepath, "rb") as f:
+                size = os.path.getsize(filepath)
+                transferCallback.add_file(filename, size)
+                callback_function = create_transfer_callback(transferCallback, filename)
+                s3.Bucket(bucket).upload_file(
+                    filepath, target_location, Config=config, Callback=callback_function
+                )
+
+            client = AuthenticatedClient()
+            res = client.post("/queue/confirmUpload", json={"uuid": queueUUID})
+            res.raise_for_status()
+            _queue.task_done()
+        except queue.Empty:
+            break
+        except Exception as e:
+            print(f"Error uploading {filename}: {e}")
+            _queue.task_done()
+
+
+if __name__ == "__main__":
+    res = expand_and_match(
+        "~/Downloads/dodo_mission_2024_02_08-20240408T074313Z-003/**.bag"
+    )
+    print(res)
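The shapes that uploadFiles expects can be read directly off the new code: credentials must provide accessKey, secretKey and sessionToken, and every entry in files must provide queueUUID, filepath, bucket and location. A minimal, hypothetical invocation (all values below are placeholders, not taken from the package):

# Hypothetical example; the real dicts come from the /file/temporaryAccess response in main.py.
credentials = {
    "accessKey": "<access-key>",
    "secretKey": "<secret-key>",
    "sessionToken": "<session-token>",
}
files = {
    "run_01.bag": {
        "queueUUID": "<queue-entry-uuid>",  # confirmed via /queue/confirmUpload after the upload
        "filepath": "/data/run_01.bag",     # local file opened and uploaded
        "bucket": "uploads",                # target S3/MinIO bucket
        "location": "mission/run_01.bag",   # object key inside the bucket
    },
}
uploadFiles(files, credentials, nrThreads=4)  # four worker threads drain the queue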
{kleinkram-0.13.1 → kleinkram-0.13.2}/src/kleinkram/main.py
@@ -179,23 +179,25 @@ def upload(
         )
         new_mission.raise_for_status()
         new_mission_data = new_mission.json()
-        print(f"Created mission: {new_mission_data['name']}")

-
+        get_temporary_credentials = "/file/temporaryAccess"

         response_2 = client.post(
-
+            get_temporary_credentials,
             json={"filenames": filenames, "missionUUID": new_mission_data["uuid"]},
         )
         response_2.raise_for_status()
-
-
-
+        temp_credentials = response_2.json()
+        credential = temp_credentials["credentials"]
+        confirmed_files = temp_credentials["files"]
+        for _file in filenames:
+            if not _file in confirmed_files.keys():
                 raise Exception(
                     "Could not upload File '" + file + "'. Is the filename unique? "
                 )
-
-
+            confirmed_files[_file]["filepath"] = filepaths[_file]
+        if len(confirmed_files.keys()) > 0:
+            uploadFiles(confirmed_files, credential, 4)

     except httpx.HTTPError as e:
         print(e)
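The rewritten block in main.py implies that /file/temporaryAccess returns both the temporary S3 credentials and a per-filename mapping of upload targets. A sketch of the response shape implied by the fields the code reads (key names come from the hunk above and from helper.py; the concrete values are illustrative only):

# Implied response of POST /file/temporaryAccess; main.py reads "credentials" and "files",
# then adds a "filepath" to each file entry before handing everything to uploadFiles.
temp_credentials = {
    "credentials": {
        "accessKey": "<access-key>",
        "secretKey": "<secret-key>",
        "sessionToken": "<session-token>",
    },
    "files": {
        "run_01.bag": {
            "queueUUID": "<queue-entry-uuid>",
            "bucket": "uploads",
            "location": "mission/run_01.bag",
        },
    },
}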
kleinkram-0.13.1/src/kleinkram/helper.py (removed)
@@ -1,74 +0,0 @@
-import glob
-import os
-import queue
-import threading
-from typing import Dict
-
-import httpx
-import tqdm
-from rich import print
-
-from kleinkram.api_client import AuthenticatedClient
-
-
-def expand_and_match(path_pattern):
-    expanded_path = os.path.expanduser(path_pattern)
-    expanded_path = os.path.expandvars(expanded_path)
-
-    normalized_path = os.path.normpath(expanded_path)
-
-    if "**" in normalized_path:
-        file_list = glob.glob(normalized_path, recursive=True)
-    else:
-        file_list = glob.glob(normalized_path)
-
-    return file_list
-
-
-def uploadFiles(files: Dict[str, str], paths: Dict[str, str], nrThreads: int):
-    _queue = queue.Queue()
-    for file in files.items():
-        _queue.put(file)
-    threads = []
-    pbar = tqdm.tqdm(total=len(files.items()) * 100)
-    for i in range(nrThreads):
-        thread = threading.Thread(target=uploadFile, args=(_queue, paths, pbar))
-        thread.start()
-        threads.append(thread)
-    for thread in threads:
-        thread.join()
-
-
-def uploadFile(_queue: queue.Queue, paths: Dict[str, str], pbar: tqdm):
-    while True:
-        try:
-            filename, info = _queue.get(timeout=3)
-            url = info["url"]
-            uuid = info["uuid"]
-            filepath = paths[filename]
-            headers = {"Content-Type": "application/octet-stream"}
-            with open(filepath, "rb") as f:
-                with httpx.Client() as cli:
-                    # Using PUT method directly for the upload
-                    response = cli.put(url, content=f, headers=headers)
-                    if response.status_code == 200:
-                        pbar.update(100)  # Update progress for each file
-                        client = AuthenticatedClient()
-                        client.post("/queue/confirmUpload", json={"uuid": uuid})
-                    else:
-                        print(
-                            f"Failed to upload {filename}. HTTP status: {response.status_code}"
-                        )
-            _queue.task_done()
-        except queue.Empty:
-            break
-        except Exception as e:
-            print(f"Error uploading {filename}: {e}")
-            _queue.task_done()
-
-
-if __name__ == "__main__":
-    res = expand_and_match(
-        "~/Downloads/dodo_mission_2024_02_08-20240408T074313Z-003/**.bag"
-    )
-    print(res)
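Taken together, the two helper.py hunks replace the 0.13.1 upload path (a single presigned-URL PUT via httpx, with a coarse 100-units-per-file progress bar) with a boto3-managed upload against the MinIO endpoint and a per-file tqdm callback. The call that actually changed, extracted from the hunks above:

# 0.13.1: one HTTP PUT of the whole file to a presigned URL
response = cli.put(url, content=f, headers=headers)

# 0.13.2: boto3 managed transfer (10 MiB multipart chunks, up to 5 concurrent parts per file)
s3.Bucket(bucket).upload_file(
    filepath, target_location, Config=config, Callback=callback_function
)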