lfss 0.12.3__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
lfss/api/__init__.py CHANGED
@@ -1,203 +1,12 @@
-import os, time, pathlib
-from threading import Lock
-from .connector import Connector
-from ..eng.datatype import FileRecord
-from ..eng.utils import decode_uri_components
-from ..eng.bounded_pool import BoundedThreadPoolExecutor
 
-def upload_file(
-    connector: Connector,
-    file_path: str,
-    dst_url: str,
-    n_retries: int = 0,
-    interval: float = 0,
-    verbose: bool = False,
-    **put_kwargs
-    ) -> tuple[bool, str]:
-    this_try = 0
-    error_msg = ""
-    assert not file_path.endswith('/'), "File path must not end with a slash."
-    if dst_url.endswith('/'):
-        fname = file_path.split('/')[-1]
-        dst_url = f"{dst_url}{fname}"
+from .bundle import *
+from .connector import Client
 
-    while this_try <= n_retries:
-        try:
-            fsize = os.path.getsize(file_path)
-            if fsize < 32 * 1024 * 1024: # 32MB
-                with open(file_path, 'rb') as f:
-                    blob = f.read()
-                connector.put(dst_url, blob, **put_kwargs)
-            else:
-                connector.post(dst_url, file_path, **put_kwargs)
-            break
-        except Exception as e:
-            if isinstance(e, KeyboardInterrupt):
-                raise e
-            if verbose:
-                print(f"Error uploading {file_path}: {e}, retrying...")
-            error_msg = str(e)
-            if hasattr(e, 'response'):
-                error_msg = f"{error_msg}, {e.response.text}" # type: ignore
-            this_try += 1
-        finally:
-            time.sleep(interval)
+# Backward compatibility
+class Connector(Client): ...
 
-    if this_try > n_retries:
-        if verbose:
-            print(f"Failed to upload {file_path} after {n_retries} retries.")
-        return False, error_msg
-    return True, error_msg
-
-def upload_directory(
-    connector: Connector,
-    directory: str,
-    path: str,
-    n_concurrent: int = 1,
-    n_retries: int = 0,
-    interval: float = 0,
-    verbose: bool = False,
-    **put_kwargs
-    ) -> list[tuple[str, str]]:
-    assert path.endswith('/'), "Path must end with a slash."
-    if path.startswith('/'):
-        path = path[1:]
-    directory = str(directory)
-
-    _counter = 0
-    _counter_lock = Lock()
-
-    faild_items = []
-    def put_file(c: Connector, file_path):
-        with _counter_lock:
-            nonlocal _counter
-            _counter += 1
-            this_count = _counter
-        dst_path = f"{path}{os.path.relpath(file_path, directory)}"
-        if verbose:
-            print(f"[{this_count}] {file_path} -> {dst_path}")
-
-        if not (res:=upload_file(
-            c, file_path, dst_path,
-            n_retries=n_retries, interval=interval, verbose=verbose, **put_kwargs
-            ))[0]:
-            faild_items.append((file_path, res[1]))
-
-    with connector.session(n_concurrent) as c:
-        with BoundedThreadPoolExecutor(n_concurrent) as executor:
-            for root, dirs, files in os.walk(directory):
-                for file in files:
-                    executor.submit(put_file, c, os.path.join(root, file))
-
-    return faild_items
-
-def download_file(
-    connector: Connector,
-    src_url: str,
-    file_path: str,
-    n_retries: int = 0,
-    interval: float = 0,
-    verbose: bool = False,
-    overwrite: bool = False
-    ) -> tuple[bool, str]:
-    this_try = 0
-    error_msg = ""
-    assert not src_url.endswith('/'), "Source URL must not end with a slash."
-    while this_try <= n_retries:
-        if os.path.isdir(file_path):
-            fname = decode_uri_components(src_url.split('/')[-1])
-            file_path = os.path.join(file_path, fname)
-
-        if not overwrite and os.path.exists(file_path):
-            if verbose:
-                print(f"File {file_path} already exists, skipping download.")
-            return True, error_msg
-        try:
-            fmeta = connector.get_meta(src_url)
-            if fmeta is None:
-                error_msg = "File not found."
-                return False, error_msg
-
-            pathlib.Path(file_path).parent.mkdir(parents=True, exist_ok=True)
-            fsize = fmeta.file_size # type: ignore
-            if fsize < 32 * 1024 * 1024: # 32MB
-                blob = connector.get(src_url)
-                assert blob is not None
-                with open(file_path, 'wb') as f:
-                    f.write(blob)
-            else:
-                with open(file_path, 'wb') as f:
-                    for chunk in connector.get_stream(src_url):
-                        f.write(chunk)
-            break
-
-        except Exception as e:
-            if isinstance(e, KeyboardInterrupt):
-                raise e
-            if verbose:
-                print(f"Error downloading {src_url}: {e}, retrying...")
-            error_msg = str(e)
-            if hasattr(e, 'response'):
-                error_msg = f"{error_msg}, {e.response.text}" # type: ignore
-            this_try += 1
-        finally:
-            time.sleep(interval)
-
-    if this_try > n_retries:
-        if verbose:
-            print(f"Failed to download {src_url} after {n_retries} retries.")
-        return False, error_msg
-    return True, error_msg
-
-def download_directory(
-    connector: Connector,
-    src_path: str,
-    directory: str,
-    n_concurrent: int = 1,
-    n_retries: int = 0,
-    interval: float = 0,
-    verbose: bool = False,
-    overwrite: bool = False
-    ) -> list[tuple[str, str]]:
-
-    directory = str(directory)
-
-    if not src_path.endswith('/'):
-        src_path += '/'
-    if not directory.endswith(os.sep):
-        directory += os.sep
-
-    _counter = 0
-    _counter_lock = Lock()
-    failed_items: list[tuple[str, str]] = []
-    file_count = 0
-    def get_file(c, src_url):
-        nonlocal _counter, failed_items, file_count, verbose
-        with _counter_lock:
-            _counter += 1
-            this_count = _counter
-        dst_path = f"{directory}{os.path.relpath(decode_uri_components(src_url), decode_uri_components(src_path))}"
-        if verbose:
-            print(f"[{this_count}/{file_count}] {src_url} -> {dst_path}")
-
-        if not (res:=download_file(
-            c, src_url, dst_path,
-            n_retries=n_retries, interval=interval, verbose=verbose, overwrite=overwrite
-            ))[0]:
-            failed_items.append((src_url, res[1]))
-
-    batch_size = 10_000
-    file_list: list[FileRecord] = []
-    with connector.session(n_concurrent) as c:
-        file_count = c.count_files(src_path, flat=True)
-        for offset in range(0, file_count, batch_size):
-            if verbose:
-                print(f"Retrieving file list... ({offset}/{file_count})", end='\r')
-            file_list.extend(c.list_files(
-                src_path, offset=offset, limit=batch_size, flat=True
-            ))
-
-        with BoundedThreadPoolExecutor(n_concurrent) as executor:
-            for file in file_list:
-                executor.submit(get_file, c, file.url)
-    return failed_items
+__all__ = [
+    "upload_file", "upload_directory",
+    "download_file", "download_directory",
+    "Client", "Connector",
+]
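The net effect: the bulk-transfer helpers now live in lfss.api.bundle and are re-exported from lfss.api, while Connector survives only as a thin alias of the renamed Client class. A minimal usage sketch, assuming the client constructor still resolves its endpoint and token from the environment as in earlier releases (the constructor itself is not part of this diff); paths are placeholders:

    # sketch, not from the diff: exercise the re-exported names in 0.13.0
    from lfss.api import Client, Connector, upload_file

    client = Client()        # new name
    legacy = Connector()     # old name still works via the subclass alias
    ok, err = upload_file(client, "local/report.pdf", "myuser/reports/")
    if not ok:
        print(f"upload failed: {err}")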
lfss/api/bundle.py ADDED
@@ -0,0 +1,201 @@
+import os, time, pathlib
+from threading import Lock
+from .connector import Client
+from ..eng.datatype import FileRecord
+from ..eng.utils import decode_uri_components
+from ..eng.bounded_pool import BoundedThreadPoolExecutor
+
+def upload_file(
+    connector: Client,
+    file_path: str,
+    dst_url: str,
+    n_retries: int = 0,
+    interval: float = 0,
+    verbose: bool = False,
+    **put_kwargs
+    ) -> tuple[bool, str]:
+    this_try = 0
+    error_msg = ""
+    assert not file_path.endswith('/'), "File path must not end with a slash."
+    if dst_url.endswith('/'):
+        fname = file_path.split('/')[-1]
+        dst_url = f"{dst_url}{fname}"
+
+    while this_try <= n_retries:
+        try:
+            fsize = os.path.getsize(file_path)
+            if fsize < 32 * 1024 * 1024: # 32MB
+                with open(file_path, 'rb') as f:
+                    blob = f.read()
+                connector.put(dst_url, blob, **put_kwargs)
+            else:
+                connector.post(dst_url, file_path, **put_kwargs)
+            break
+        except Exception as e:
+            if isinstance(e, KeyboardInterrupt):
+                raise e
+            if verbose:
+                print(f"Error uploading {file_path}: {e}, retrying...")
+            error_msg = str(e)
+            if hasattr(e, 'response'):
+                error_msg = f"{error_msg}, {e.response.text}" # type: ignore
+            this_try += 1
+        finally:
+            time.sleep(interval)
+
+    if this_try > n_retries:
+        if verbose:
+            print(f"Failed to upload {file_path} after {n_retries} retries.")
+        return False, error_msg
+    return True, error_msg
+
+def upload_directory(
+    connector: Client,
+    directory: str,
+    path: str,
+    n_concurrent: int = 1,
+    n_retries: int = 0,
+    interval: float = 0,
+    verbose: bool = False,
+    **put_kwargs
+    ) -> list[tuple[str, str]]:
+    assert path.endswith('/'), "Path must end with a slash."
+    if path.startswith('/'):
+        path = path[1:]
+    directory = str(directory)
+
+    _counter = 0
+    _counter_lock = Lock()
+
+    faild_items = []
+    def put_file(c: Client, file_path):
+        with _counter_lock:
+            nonlocal _counter
+            _counter += 1
+            this_count = _counter
+        dst_path = f"{path}{os.path.relpath(file_path, directory)}"
+        if verbose:
+            print(f"[{this_count}] {file_path} -> {dst_path}")
+
+        if not (res:=upload_file(
+            c, file_path, dst_path,
+            n_retries=n_retries, interval=interval, verbose=verbose, **put_kwargs
+            ))[0]:
+            faild_items.append((file_path, res[1]))
+
+    with connector.session(n_concurrent) as c, BoundedThreadPoolExecutor(n_concurrent) as executor:
+        for root, dirs, files in os.walk(directory):
+            for file in files:
+                executor.submit(put_file, c, os.path.join(root, file))
+
+    return faild_items
+
+def download_file(
+    connector: Client,
+    src_url: str,
+    file_path: str,
+    n_retries: int = 0,
+    interval: float = 0,
+    verbose: bool = False,
+    overwrite: bool = False
+    ) -> tuple[bool, str]:
+    this_try = 0
+    error_msg = ""
+    assert not src_url.endswith('/'), "Source URL must not end with a slash."
+    while this_try <= n_retries:
+        if os.path.isdir(file_path):
+            fname = decode_uri_components(src_url.split('/')[-1])
+            file_path = os.path.join(file_path, fname)
+
+        if not overwrite and os.path.exists(file_path):
+            if verbose:
+                print(f"File {file_path} already exists, skipping download.")
+            return True, error_msg
+        try:
+            fmeta = connector.get_meta(src_url)
+            if fmeta is None:
+                error_msg = "File not found."
+                return False, error_msg
+
+            pathlib.Path(file_path).parent.mkdir(parents=True, exist_ok=True)
+            fsize = fmeta.file_size # type: ignore
+            if fsize < 32 * 1024 * 1024: # 32MB
+                blob = connector.get(src_url)
+                assert blob is not None
+                with open(file_path, 'wb') as f:
+                    f.write(blob)
+            else:
+                with open(file_path, 'wb') as f:
+                    for chunk in connector.get_stream(src_url):
+                        f.write(chunk)
+            break
+
+        except Exception as e:
+            if isinstance(e, KeyboardInterrupt):
+                raise e
+            if verbose:
+                print(f"Error downloading {src_url}: {e}, retrying...")
+            error_msg = str(e)
+            if hasattr(e, 'response'):
+                error_msg = f"{error_msg}, {e.response.text}" # type: ignore
+            this_try += 1
+        finally:
+            time.sleep(interval)
+
+    if this_try > n_retries:
+        if verbose:
+            print(f"Failed to download {src_url} after {n_retries} retries.")
+        return False, error_msg
+    return True, error_msg
+
+def download_directory(
+    connector: Client,
+    src_path: str,
+    directory: str,
+    n_concurrent: int = 1,
+    n_retries: int = 0,
+    interval: float = 0,
+    verbose: bool = False,
+    overwrite: bool = False
+    ) -> list[tuple[str, str]]:
+
+    directory = str(directory)
+
+    if not src_path.endswith('/'):
+        src_path += '/'
+    if not directory.endswith(os.sep):
+        directory += os.sep
+
+    _counter = 0
+    _counter_lock = Lock()
+    failed_items: list[tuple[str, str]] = []
+    file_count = 0
+    def get_file(c, src_url):
+        nonlocal _counter, failed_items, file_count, verbose
+        with _counter_lock:
+            _counter += 1
+            this_count = _counter
+        dst_path = f"{directory}{os.path.relpath(decode_uri_components(src_url), decode_uri_components(src_path))}"
+        if verbose:
+            print(f"[{this_count}/{file_count}] {src_url} -> {dst_path}")
+
+        if not (res:=download_file(
+            c, src_url, dst_path,
+            n_retries=n_retries, interval=interval, verbose=verbose, overwrite=overwrite
+            ))[0]:
+            failed_items.append((src_url, res[1]))
+
+    batch_size = 10_000
+    file_list: list[FileRecord] = []
+    with connector.session(n_concurrent) as c, BoundedThreadPoolExecutor(n_concurrent) as executor:
+        file_count = c.count_files(src_path, flat=True)
+        for offset in range(0, file_count, batch_size):
+            if verbose:
+                print(f"Retrieving file list... ({offset}/{file_count})", end='\r')
+            file_list.extend(c.list_files(
+                src_path, offset=offset, limit=batch_size, flat=True
+            ))
+
+        for file in file_list:
+            executor.submit(get_file, c, file.url)
+    return failed_items
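For reference, a sketch of how these bundle helpers are typically driven, using the signatures shown above; the paths and concurrency settings are illustrative, not taken from the diff. Both directory functions return a list of (item, error message) tuples for the entries that ultimately failed:

    # sketch: bulk transfer with retries and a bounded worker pool
    from lfss.api import Client, upload_directory, download_directory

    client = Client()   # endpoint/token resolution is not shown in this diff
    failed = upload_directory(
        client, "./dataset", "myuser/dataset/",
        n_concurrent=4, n_retries=2, interval=0.5, verbose=True,
    )
    for src, err in failed:
        print(f"could not upload {src}: {err}")

    failed = download_directory(
        client, "myuser/dataset/", "./restore",
        n_concurrent=4, overwrite=False,
    )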
lfss/api/connector.py CHANGED
@@ -7,7 +7,6 @@ import requests.adapters
 import urllib.parse
 from tempfile import SpooledTemporaryFile
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from lfss.eng.error import PathNotFoundError
 from lfss.eng.datatype import (
     FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents, AccessLevel,
     FileSortKey, DirSortKey
@@ -25,34 +24,34 @@ def _p(x: str) -> str:
         x = x[1:]
     return x
 
-class Connector:
+class Client:
     class Session:
         def __init__(
-            self, connector: Connector, pool_size: int = 10,
+            self, client: Client, pool_size: int = 10,
             retry: int = 1, backoff_factor: num_t = 0.5, status_forcelist: list[int] = [503]
             ):
-            self.connector = connector
+            self.client = client
             self.pool_size = pool_size
             self.retry_adapter = requests.adapters.Retry(
                 total=retry, backoff_factor=backoff_factor, status_forcelist=status_forcelist,
             )
         def open(self):
             self.close()
-            if self.connector._session is None:
+            if self.client._session is None:
                 s = requests.Session()
                 adapter = requests.adapters.HTTPAdapter(pool_connections=self.pool_size, pool_maxsize=self.pool_size, max_retries=self.retry_adapter)
                 s.mount('http://', adapter)
                 s.mount('https://', adapter)
-                self.connector._session = s
+                self.client._session = s
         def close(self):
-            if self.connector._session is not None:
-                self.connector._session.close()
-                self.connector._session = None
+            if self.client._session is not None:
+                self.client._session.close()
+                self.client._session = None
         def __call__(self):
-            return self.connector
+            return self.client
         def __enter__(self):
             self.open()
-            return self.connector
+            return self.client
         def __exit__(self, exc_type, exc_value, traceback):
            self.close()
 
@@ -77,7 +76,7 @@ class Connector:
         return self.Session(self, pool_size, **kwargs)
 
     def _fetch_factory(
-        self, method: Literal['GET', 'POST', 'PUT', 'DELETE'],
+        self, method: Literal['GET', 'POST', 'PUT', 'DELETE', 'HEAD'],
         path: str, search_params: dict = {}, extra_headers: dict = {}
         ):
         if path.startswith('/'):
@@ -103,6 +102,17 @@ class Connector:
             response.raise_for_status()
             return response
         return f
+
+    def exists(self, path: str) -> bool:
+        """Checks if a file/directory exists."""
+        path = _p(path)
+        try:
+            self._fetch_factory('HEAD', path)()
+        except requests.exceptions.HTTPError as e:
+            if e.response.status_code == 404:
+                return False
+            raise e
+        return True
 
     def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a file to the specified path."""
@@ -184,23 +194,16 @@ class Connector:
         )
         return response.json()
 
-    def _get(self, path: str, stream: bool = False) -> Optional[requests.Response]:
-        try:
-            response = self._fetch_factory('GET', path)(stream=stream)
-        except requests.exceptions.HTTPError as e:
-            if e.response.status_code == 404:
-                return None
-            raise e
-        return response
+    def _get(self, path: str, stream: bool = False) -> requests.Response:
+        return self._fetch_factory('GET', path)(stream=stream)
 
-    def get(self, path: str) -> Optional[bytes]:
+    def get(self, path: str) -> bytes:
         """Downloads a file from the specified path."""
         path = _p(path)
         response = self._get(path)
-        if response is None: return None
         return response.content
 
-    def get_partial(self, path: str, range_start: int = -1, range_end: int = -1) -> Optional[bytes]:
+    def get_partial(self, path: str, range_start: int = -1, range_end: int = -1) -> bytes:
         """
         Downloads a partial file from the specified path.
         start and end are the byte offsets, both inclusive.
@@ -209,20 +212,16 @@ class Connector:
         response = self._fetch_factory('GET', path, extra_headers={
             'Range': f"bytes={range_start if range_start >= 0 else ''}-{range_end if range_end >= 0 else ''}"
         })()
-        if response is None: return None
         return response.content
 
     def get_stream(self, path: str, chunk_size = 1024) -> Iterator[bytes]:
         """Downloads a file from the specified path, will raise PathNotFoundError if path not found."""
         path = _p(path)
-        response = self._get(path, stream=True)
-        if response is None: raise PathNotFoundError("Path not found: " + path)
-        return response.iter_content(chunk_size)
+        return self._get(path, stream=True).iter_content(chunk_size)
 
-    def get_json(self, path: str) -> Optional[dict]:
+    def get_json(self, path: str) -> dict:
         path = _p(path)
         response = self._get(path)
-        if response is None: return None
         assert response.headers['Content-Type'] == 'application/json'
         return response.json()
 
@@ -242,23 +241,18 @@ class Connector:
         path = _p(path)
         self._fetch_factory('DELETE', path)()
 
-    def get_meta(self, path: str) -> Optional[FileRecord | DirectoryRecord]:
+    def get_meta(self, path: str) -> FileRecord | DirectoryRecord:
         """Gets the metadata for the file at the specified path."""
         path = _p(path)
-        try:
-            response = self._fetch_factory('GET', '_api/meta', {'path': path})()
-            if path.endswith('/'):
-                return DirectoryRecord(**response.json())
-            else:
-                return FileRecord(**response.json())
-        except requests.exceptions.HTTPError as e:
-            if e.response.status_code == 404:
-                return None
-            raise e
+        response = self._fetch_factory('GET', '_api/meta', {'path': path})()
+        if path.endswith('/'):
+            return DirectoryRecord(**response.json())
+        else:
+            return FileRecord(**response.json())
     # shorthand methods for type constraints
-    def get_fmeta(self, path: str) -> Optional[FileRecord]: assert (f:=self.get_meta(path)) is None or isinstance(f, FileRecord); return f
-    def get_dmeta(self, path: str) -> Optional[DirectoryRecord]: assert (d:=self.get_meta(path)) is None or isinstance(d, DirectoryRecord); return d
-
+    def get_fmeta(self, path: str) -> FileRecord: assert (f:=self.get_meta(path)) is None or isinstance(f, FileRecord); return f
+    def get_dmeta(self, path: str) -> DirectoryRecord: assert (d:=self.get_meta(path)) is None or isinstance(d, DirectoryRecord); return d
+
 
     def count_files(self, path: str, flat: bool = False) -> int:
         assert path.endswith('/')
         path = _p(path)
@@ -307,11 +301,11 @@ class Connector:
         path = _p(path)
         if path == '/':
             # handle root path separately
-            # TODO: change later
-            response = self._fetch_factory('GET', path)()
-            dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
-            files = [FileRecord(**f) for f in response.json()['files']]
-            return PathContents(dirs=dirs, files=files)
+            dirnames = [f'{self.whoami().username}/'] + [f'{p.username}/' for p in self.list_peers(AccessLevel.READ)]
+            return PathContents(
+                dirs = [DirectoryRecord(url = d) for d in dirnames],
+                files = []
+            )
 
         dirs: list[DirectoryRecord] = []
         files: list[FileRecord] = []
@@ -360,14 +354,14 @@ class Connector:
     def set_file_permission(self, path: str, permission: int | FileReadPermission):
         """Sets the file permission for the specified path."""
         path = _p(path)
-        self._fetch_factory('POST', '_api/meta', {'path': path, 'perm': int(permission)})(
+        self._fetch_factory('POST', '_api/set-perm', {'path': path, 'perm': int(permission)})(
             headers={'Content-Type': 'application/www-form-urlencoded'}
         )
 
     def move(self, path: str, new_path: str):
         """Move file or directory to a new path."""
         path = _p(path); new_path = _p(new_path)
-        self._fetch_factory('POST', '_api/meta', {'path': path, 'new_path': new_path})(
+        self._fetch_factory('POST', '_api/move', {'src': path, 'dst': new_path})(
            headers = {'Content-Type': 'application/www-form-urlencoded'}
        )
 
@@ -389,11 +383,21 @@ class Connector:
 
     def whoami(self) -> UserRecord:
         """Gets information about the current user."""
-        response = self._fetch_factory('GET', '_api/whoami')()
+        response = self._fetch_factory('GET', '_api/user/whoami')()
         return UserRecord(**response.json())
+
+    def storage_used(self) -> int:
+        """Gets the storage used by the current user, in bytes."""
+        response = self._fetch_factory('GET', '_api/user/storage')()
+        return response.json()['used']
 
     def list_peers(self, level: AccessLevel = AccessLevel.READ, incoming: bool = False) -> list[UserRecord]:
-        """List all users that have at least the given access level to the current user."""
+        """
+        if incoming is False (default):
+            list all users that the current user has at least the given access level to,
+        if incoming is True:
+            list all users that have at least the given access level to the current user
+        """
         response = self._fetch_factory('GET', '_api/list-peers', {'level': int(level), 'incoming': incoming})()
         users = [UserRecord(**u) for u in response.json()]
         return users
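A sketch of how a caller might adapt to the stricter return types in this file: get, get_json, and get_meta no longer return None for a missing path, so a 404 now surfaces as requests.exceptions.HTTPError unless the new exists() check is used first. Paths below are placeholders, and the no-argument constructor is assumed to behave as in earlier releases:

    # sketch: the 0.13.0 client now raises on missing paths instead of returning None
    import requests
    from lfss.api import Client

    client = Client()
    if client.exists("myuser/notes/todo.txt"):
        meta = client.get_fmeta("myuser/notes/todo.txt")
        print(meta.file_size, "bytes;", client.storage_used(), "bytes used in total")

    try:
        data = client.get("myuser/notes/missing.txt")
    except requests.exceptions.HTTPError as e:
        data = None if e.response.status_code == 404 else None  # 404 is no longer swallowed
        if e.response.status_code != 404:
            raise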
lfss/cli/__init__.py CHANGED
@@ -3,17 +3,24 @@ from typing import Iterable, TypeVar, Generator, Callable, Optional
 import requests, os
 
 @contextmanager
-def catch_request_error(error_code_handler: Optional[ dict[int, Callable[[requests.Response], None]] ] = None):
+def catch_request_error(
+    error_code_handler: Optional[ dict[int, Callable[[requests.Response], None]] ] = None,
+    cleanup_fn: Optional[Callable[[], None]] = None
+    ):
     try:
         yield
     except requests.RequestException as e:
         if error_code_handler is not None:
             if e.response is not None and e.response.status_code in error_code_handler:
                 error_code_handler[e.response.status_code](e.response)
+                if cleanup_fn is not None:
+                    cleanup_fn()
                 return
         print(f"\033[31m[Request error]: {e}\033[0m")
         if e.response is not None:
             print(f"\033[91m[Error message]: {e.response.text}\033[0m")
+        if cleanup_fn is not None:
+            cleanup_fn()
 
 T = TypeVar('T')
 def line_sep(iter: Iterable[T], enable=True, start=True, end=True, middle=False, color="\033[90m") -> Generator[T, None, None]:
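The new cleanup_fn hook runs after a failed request has been handled or reported, which lets CLI commands discard partial artifacts. A sketch under assumed usage (the URL, file name, and handler mapping are illustrative, not from the diff):

    # sketch: remove a partially written file when the wrapped request fails
    import os, requests
    from lfss.cli import catch_request_error

    tmp_path = "partial.download"
    with catch_request_error(
            error_code_handler={404: lambda resp: print("remote path does not exist")},
            cleanup_fn=lambda: os.remove(tmp_path) if os.path.exists(tmp_path) else None,
        ):
        resp = requests.get("http://localhost:8000/myuser/big.bin", timeout=10)  # placeholder URL
        resp.raise_for_status()   # HTTPError is a RequestException, so the context manager catches it
        with open(tmp_path, "wb") as f:
            f.write(resp.content)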