lfss 0.11.6__tar.gz → 0.12.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {lfss-0.11.6 → lfss-0.12.0}/PKG-INFO +1 -1
  2. lfss-0.12.0/docs/Client.md +37 -0
  3. {lfss-0.11.6 → lfss-0.12.0}/docs/changelog.md +8 -0
  4. {lfss-0.11.6 → lfss-0.12.0}/lfss/api/__init__.py +2 -2
  5. {lfss-0.11.6 → lfss-0.12.0}/lfss/api/connector.py +87 -13
  6. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/__init__.py +10 -6
  7. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/cli.py +82 -25
  8. {lfss-0.11.6 → lfss-0.12.0}/pyproject.toml +1 -1
  9. lfss-0.11.6/docs/Client.md +0 -40
  10. {lfss-0.11.6 → lfss-0.12.0}/Readme.md +0 -0
  11. {lfss-0.11.6 → lfss-0.12.0}/docs/Enviroment_variables.md +0 -0
  12. {lfss-0.11.6 → lfss-0.12.0}/docs/Known_issues.md +0 -0
  13. {lfss-0.11.6 → lfss-0.12.0}/docs/Permission.md +0 -0
  14. {lfss-0.11.6 → lfss-0.12.0}/docs/Webdav.md +0 -0
  15. {lfss-0.11.6 → lfss-0.12.0}/frontend/api.js +0 -0
  16. {lfss-0.11.6 → lfss-0.12.0}/frontend/index.html +0 -0
  17. {lfss-0.11.6 → lfss-0.12.0}/frontend/info.css +0 -0
  18. {lfss-0.11.6 → lfss-0.12.0}/frontend/info.js +0 -0
  19. {lfss-0.11.6 → lfss-0.12.0}/frontend/login.css +0 -0
  20. {lfss-0.11.6 → lfss-0.12.0}/frontend/login.js +0 -0
  21. {lfss-0.11.6 → lfss-0.12.0}/frontend/popup.css +0 -0
  22. {lfss-0.11.6 → lfss-0.12.0}/frontend/popup.js +0 -0
  23. {lfss-0.11.6 → lfss-0.12.0}/frontend/scripts.js +0 -0
  24. {lfss-0.11.6 → lfss-0.12.0}/frontend/state.js +0 -0
  25. {lfss-0.11.6 → lfss-0.12.0}/frontend/styles.css +0 -0
  26. {lfss-0.11.6 → lfss-0.12.0}/frontend/thumb.css +0 -0
  27. {lfss-0.11.6 → lfss-0.12.0}/frontend/thumb.js +0 -0
  28. {lfss-0.11.6 → lfss-0.12.0}/frontend/utils.js +0 -0
  29. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/balance.py +0 -0
  30. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/log.py +0 -0
  31. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/panel.py +0 -0
  32. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/serve.py +0 -0
  33. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/user.py +0 -0
  34. {lfss-0.11.6 → lfss-0.12.0}/lfss/cli/vacuum.py +0 -0
  35. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/__init__.py +0 -0
  36. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/bounded_pool.py +0 -0
  37. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/config.py +0 -0
  38. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/connection_pool.py +0 -0
  39. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/database.py +0 -0
  40. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/datatype.py +0 -0
  41. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/error.py +0 -0
  42. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/log.py +0 -0
  43. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/thumb.py +0 -0
  44. {lfss-0.11.6 → lfss-0.12.0}/lfss/eng/utils.py +0 -0
  45. {lfss-0.11.6 → lfss-0.12.0}/lfss/sql/init.sql +0 -0
  46. {lfss-0.11.6 → lfss-0.12.0}/lfss/sql/pragma.sql +0 -0
  47. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/app.py +0 -0
  48. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/app_base.py +0 -0
  49. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/app_dav.py +0 -0
  50. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/app_native.py +0 -0
  51. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/common_impl.py +0 -0
  52. {lfss-0.11.6 → lfss-0.12.0}/lfss/svc/request_log.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lfss
- Version: 0.11.6
+ Version: 0.12.0
  Summary: Lite file storage service
  Home-page: https://github.com/MenxLi/lfss
  Author: Li, Mengxun
@@ -0,0 +1,37 @@
+
+ # Client-side CLI tools
+
+ To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
+ ```sh
+ pip install requests
+ pip install lfss --no-deps
+ ```
+
+ Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
+ then you can use the following commands:
+ ```sh
+ # Check current user information
+ lfss whoami
+
+ # Query a path
+ lfss i remote/file[/or_dir/]
+
+ # List a specified path,
+ # with pagination and sorting
+ lfss ls remote/dir/ --offset 0 --limit 100 --order access_time
+
+ # Upload a file
+ lfss up local/file.txt remote/file.txt
+
+ # Upload a directory, note the ending slashes
+ lfss up local/dir/ remote/dir/
+
+ # Download a file
+ lfss down remote/file.txt local/file.txt
+
+ # Download a directory, with 8 concurrent jobs
+ # Overwrite existing files
+ lfss down remote/dir/ local/dir/ -j 8 --conflict overwrite
+ ```
+
+ More commands can be found using `lfss --help`.
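For readers who prefer the Python API over the CLI, below is a rough equivalent of the commands above using `lfss.api.Connector` from this release. It is a sketch, not part of the package docs, and it assumes the default `Connector()` constructor falls back to the `LFSS_ENDPOINT` / `LFSS_TOKEN` environment variables, as the module-level defaults in `lfss/api/connector.py` (shown further down in this diff) suggest.

```python
# Hedged sketch: Python-API equivalent of the CLI examples above.
# Assumption: Connector() picks up LFSS_ENDPOINT / LFSS_TOKEN from the environment.
import os
from lfss.api import Connector

os.environ.setdefault("LFSS_ENDPOINT", "http://localhost:8000")
os.environ.setdefault("LFSS_TOKEN", "<your-token>")

c = Connector()
c.put("remote/file.txt", b"hello")             # like `lfss up local/file.txt remote/file.txt`
print(c.get_meta("remote/file.txt"))           # like `lfss i remote/file.txt`
page = c.list_path("remote/dir/", offset=0, limit=100, order_by="access_time")
print([f.url for f in page.files])             # like `lfss ls remote/dir/ --limit 100`
c.delete("remote/file.txt")                    # like `lfss del remote/file.txt`
```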
@@ -1,3 +1,11 @@
+ ## 0.12
+
+ ### 0.12.0
+ - Change default script to client CLI
+ - Client CLI default to verbose output
+ - Client CLI subcommand rename and alias
+ - Add delete path and more error handling for client CLI
+
  ## 0.11

  ### 0.11.6
@@ -75,7 +75,7 @@ def upload_directory(
  this_count = _counter
  dst_path = f"{path}{os.path.relpath(file_path, directory)}"
  if verbose:
- print(f"[{this_count}] Uploading {file_path} to {dst_path}")
+ print(f"[{this_count}] {file_path} -> {dst_path}")

  if not (res:=upload_file(
  c, file_path, dst_path,
@@ -178,7 +178,7 @@ def download_directory(
  this_count = _counter
  dst_path = f"{directory}{os.path.relpath(decode_uri_components(src_url), decode_uri_components(src_path))}"
  if verbose:
- print(f"[{this_count}/{file_count}] Downloading {src_url} to {dst_path}")
+ print(f"[{this_count}/{file_count}] {src_url} -> {dst_path}")

  if not (res:=download_file(
  c, src_url, dst_path,
@@ -1,11 +1,12 @@
  from __future__ import annotations
  from typing import Optional, Literal
  from collections.abc import Iterator
- import os, json
+ import os
  import requests
  import requests.adapters
  import urllib.parse
  from tempfile import SpooledTemporaryFile
+ from concurrent.futures import ThreadPoolExecutor, as_completed
  from lfss.eng.error import PathNotFoundError
  from lfss.eng.datatype import (
  FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents, AccessLevel,
@@ -17,6 +18,13 @@ _default_endpoint = os.environ.get('LFSS_ENDPOINT', 'http://localhost:8000')
  _default_token = os.environ.get('LFSS_TOKEN', '')
  num_t = float | int

+ def _p(x: str) -> str:
+ if x == '/':
+ return x
+ if x.startswith('/'):
+ x = x[1:]
+ return x
+
  class Connector:
  class Session:
  def __init__(
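For context, the new `_p` helper added above only strips a single leading slash, so client-side paths like `/user/file.txt` and `user/file.txt` resolve to the same server path. A minimal behavior check (the helper is copied verbatim; the diff view drops Python indentation):

```python
# Behavior check for the _p path-normalization helper introduced in this hunk.
def _p(x: str) -> str:
    if x == '/':
        return x
    if x.startswith('/'):
        x = x[1:]
    return x

assert _p('/') == '/'               # the root path is kept as-is
assert _p('/a/b.txt') == 'a/b.txt'  # one leading slash is stripped
assert _p('a/b.txt') == 'a/b.txt'   # already-relative paths are unchanged
assert _p('//a') == '/a'            # only a single slash is removed
```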
@@ -99,6 +107,7 @@ class Connector:
  def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
  """Uploads a file to the specified path."""
  assert isinstance(file_data, bytes), "file_data must be bytes"
+ path = _p(path)

  # Skip ahead by checking if the file already exists
  if conflict == 'skip-ahead':
@@ -123,6 +132,7 @@ class Connector:
  using the POST method, with form-data/multipart.
  file can be a path to a file on disk, or bytes.
  """
+ path = _p(path)

  # Skip ahead by checking if the file already exists
  if conflict == 'skip-ahead':
@@ -155,6 +165,7 @@ class Connector:
  """Uploads a JSON file to the specified path."""
  assert path.endswith('.json'), "Path must end with .json"
  assert isinstance(data, dict), "data must be a dict"
+ path = _p(path)

  # Skip ahead by checking if the file already exists
  if conflict == 'skip-ahead':
@@ -184,6 +195,7 @@ class Connector:

  def get(self, path: str) -> Optional[bytes]:
  """Downloads a file from the specified path."""
+ path = _p(path)
  response = self._get(path)
  if response is None: return None
  return response.content
@@ -193,6 +205,7 @@ class Connector:
  Downloads a partial file from the specified path.
  start and end are the byte offsets, both inclusive.
  """
+ path = _p(path)
  response = self._fetch_factory('GET', path, extra_headers={
  'Range': f"bytes={range_start if range_start >= 0 else ''}-{range_end if range_end >= 0 else ''}"
  })()
@@ -201,11 +214,13 @@ class Connector:

  def get_stream(self, path: str) -> Iterator[bytes]:
  """Downloads a file from the specified path, will raise PathNotFoundError if path not found."""
+ path = _p(path)
  response = self._get(path, stream=True)
  if response is None: raise PathNotFoundError("Path not found: " + path)
  return response.iter_content(chunk_size=1024)

  def get_json(self, path: str) -> Optional[dict]:
+ path = _p(path)
  response = self._get(path)
  if response is None: return None
  assert response.headers['Content-Type'] == 'application/json'
@@ -218,16 +233,18 @@ class Connector:
  """
  response = self._fetch_factory(
  'GET', '_api/get-multiple',
- {'path': paths, "skip_content": skip_content}
+ {'path': [_p(p) for p in paths], "skip_content": skip_content}
  )()
  return response.json()

  def delete(self, path: str):
  """Deletes the file at the specified path."""
+ path = _p(path)
  self._fetch_factory('DELETE', path)()

  def get_meta(self, path: str) -> Optional[FileRecord | DirectoryRecord]:
  """Gets the metadata for the file at the specified path."""
+ path = _p(path)
  try:
  response = self._fetch_factory('GET', '_api/meta', {'path': path})()
  if path.endswith('/'):
@@ -242,19 +259,9 @@ class Connector:
  def get_fmeta(self, path: str) -> Optional[FileRecord]: assert (f:=self.get_meta(path)) is None or isinstance(f, FileRecord); return f
  def get_dmeta(self, path: str) -> Optional[DirectoryRecord]: assert (d:=self.get_meta(path)) is None or isinstance(d, DirectoryRecord); return d

- def list_path(self, path: str) -> PathContents:
- """
- shorthand list with limited options,
- for large directories / more options, use list_files and list_dirs instead.
- """
- assert path.endswith('/')
- response = self._fetch_factory('GET', path)()
- dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
- files = [FileRecord(**f) for f in response.json()['files']]
- return PathContents(dirs=dirs, files=files)
-
  def count_files(self, path: str, flat: bool = False) -> int:
  assert path.endswith('/')
+ path = _p(path)
  response = self._fetch_factory('GET', '_api/count-files', {'path': path, 'flat': flat})()
  return response.json()['count']

@@ -264,6 +271,7 @@ class Connector:
  flat: bool = False
  ) -> list[FileRecord]:
  assert path.endswith('/')
+ path = _p(path)
  response = self._fetch_factory('GET', "_api/list-files", {
  'path': path,
  'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'flat': flat
@@ -272,6 +280,7 @@ class Connector:

  def count_dirs(self, path: str) -> int:
  assert path.endswith('/')
+ path = _p(path)
  response = self._fetch_factory('GET', '_api/count-dirs', {'path': path})()
  return response.json()['count']

@@ -281,32 +290,97 @@ class Connector:
  skim: bool = True
  ) -> list[DirectoryRecord]:
  assert path.endswith('/')
+ path = _p(path)
  response = self._fetch_factory('GET', "_api/list-dirs", {
  'path': path,
  'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'skim': skim
  })()
  return [DirectoryRecord(**d) for d in response.json()]
+
+ def list_path(
+ self, path: str, offset: int = 0, limit: int = 1000,
+ order_by: FileSortKey = '', order_desc: bool = False,
+ _workers: int = 2
+ ) -> PathContents:
+ """ Aggregately lists both files and directories under the given path. """
+ assert path.endswith('/')
+ path = _p(path)
+ if path == '/':
+ # handle root path separately
+ # TODO: change later
+ response = self._fetch_factory('GET', path)()
+ dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
+ files = [FileRecord(**f) for f in response.json()['files']]
+ return PathContents(dirs=dirs, files=files)
+
+ dirs: list[DirectoryRecord] = []
+ files: list[FileRecord] = []
+ with ThreadPoolExecutor(max_workers=_workers) as executor:
+ count_futures = {
+ executor.submit(self.count_dirs, path): 'dirs',
+ executor.submit(self.count_files, path, flat=False): 'files'
+ }
+ dir_count = 0
+ file_count = 0
+ for future in as_completed(count_futures):
+ if count_futures[future] == 'dirs':
+ dir_count = future.result()
+ else:
+ file_count = future.result()
+ dir_offset = offset
+ dir_limit = min(limit, max(0, dir_count - dir_offset))
+ file_offset = max(0, offset - dir_count)
+ file_limit = min(limit - dir_limit, max(0, file_count - file_offset))
+
+ dir_order_by = 'dirname' if order_by == 'url' else ''
+ file_order_by = order_by
+
+ def fetch_dirs():
+ nonlocal dirs
+ if dir_limit > 0:
+ dirs = self.list_dirs(
+ path, offset=dir_offset, limit=dir_limit,
+ order_by=dir_order_by, order_desc=order_desc
+ )
+ def fetch_files():
+ nonlocal files
+ if file_limit > 0:
+ files = self.list_files(
+ path, offset=file_offset, limit=file_limit,
+ order_by=file_order_by, order_desc=order_desc, flat=False
+ )
+ futures = [
+ executor.submit(fetch_dirs),
+ executor.submit(fetch_files)
+ ]
+ for future in as_completed(futures):
+ future.result()
+ return PathContents(dirs=dirs, files=files)

  def set_file_permission(self, path: str, permission: int | FileReadPermission):
  """Sets the file permission for the specified path."""
+ path = _p(path)
  self._fetch_factory('POST', '_api/meta', {'path': path, 'perm': int(permission)})(
  headers={'Content-Type': 'application/www-form-urlencoded'}
  )

  def move(self, path: str, new_path: str):
  """Move file or directory to a new path."""
+ path = _p(path); new_path = _p(new_path)
  self._fetch_factory('POST', '_api/meta', {'path': path, 'new_path': new_path})(
  headers = {'Content-Type': 'application/www-form-urlencoded'}
  )

  def copy(self, src: str, dst: str):
  """Copy file from src to dst."""
+ src = _p(src); dst = _p(dst)
  self._fetch_factory('POST', '_api/copy', {'src': src, 'dst': dst})(
  headers = {'Content-Type': 'application/www-form-urlencoded'}
  )

  def bundle(self, path: str) -> Iterator[bytes]:
  """Bundle a path into a zip file."""
+ path = _p(path)
  response = self._fetch_factory('GET', '_api/bundle', {'path': path})(
  headers = {'Content-Type': 'application/www-form-urlencoded'},
  stream = True
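The new `list_path` pages directories first and then fills the remainder of the window with files. The offset/limit arithmetic in the hunk above works out as in this illustrative example (counts and arguments are made up):

```python
# Worked example of the offset/limit split in the new Connector.list_path
# (hypothetical counts; mirrors the expressions in the hunk above).
dir_count, file_count = 5, 20   # as returned by count_dirs / count_files
offset, limit = 3, 10           # caller wants combined items 3..12

dir_offset = offset                                                     # 3
dir_limit = min(limit, max(0, dir_count - dir_offset))                  # min(10, 2) = 2
file_offset = max(0, offset - dir_count)                                # 0
file_limit = min(limit - dir_limit, max(0, file_count - file_offset))   # min(8, 20) = 8

# The last 2 directories are listed first, then 8 files fill the rest of the page.
assert (dir_offset, dir_limit, file_offset, file_limit) == (3, 2, 0, 8)
```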
@@ -1,12 +1,16 @@
  from contextlib import contextmanager
- from typing import Iterable, TypeVar, Generator
+ from typing import Iterable, TypeVar, Generator, Callable, Optional
  import requests, os

  @contextmanager
- def catch_request_error():
+ def catch_request_error(error_code_handler: Optional[ dict[int, Callable[[requests.Response], None]] ] = None):
  try:
  yield
  except requests.RequestException as e:
+ if error_code_handler is not None:
+ if e.response is not None and e.response.status_code in error_code_handler:
+ error_code_handler[e.response.status_code](e.response)
+ return
  print(f"\033[31m[Request error]: {e}\033[0m")
  if e.response is not None:
  print(f"\033[91m[Error message]: {e.response.text}\033[0m")
@@ -15,13 +19,13 @@ T = TypeVar('T')
  def line_sep(iter: Iterable[T], enable=True, start=True, end=True, color="\033[90m") -> Generator[T, None, None]:
  screen_width = os.get_terminal_size().columns
  def print_ln():
- print(color + "-" * screen_width + "\033[0m")
+ if enable: print(color + "-" * screen_width + "\033[0m")

- if start and enable:
+ if start:
  print_ln()
  for i, line in enumerate(iter):
- if enable and i > 0:
+ if i > 0:
  print_ln()
  yield line
- if end and enable:
+ if end:
  print_ln()
@@ -3,7 +3,10 @@ import argparse, typing, sys
  from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
  from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey, AccessLevel
  from lfss.eng.utils import decode_uri_components, fmt_storage_size
- from . import catch_request_error, line_sep
+ from . import catch_request_error, line_sep as _line_sep
+
+ # monkey patch to avoid printing line separators...may remove line_sep in the future
+ line_sep = lambda *args, **kwargs: _line_sep(*args, enable=False, **kwargs)

  def parse_permission(s: str) -> FileReadPermission:
  for p in FileReadPermission:
@@ -15,6 +18,13 @@ def parse_access_level(s: str) -> AccessLevel:
  if p.name.lower() == s.lower():
  return p
  raise ValueError(f"Invalid access level {s}")
+ def default_error_handler_dict(path: str):
+ return {
+ 401: lambda _: print(f"\033[31mUnauthorized\033[0m ({path})", file=sys.stderr),
+ 403: lambda _: print(f"\033[31mForbidden\033[0m ({path})", file=sys.stderr),
+ 404: lambda _: print(f"\033[31mNot found\033[0m ({path})", file=sys.stderr),
+ 409: lambda _: print(f"\033[31mConflict\033[0m ({path})", file=sys.stderr),
+ }

  def parse_arguments():
  parser = argparse.ArgumentParser(description="Client-side command line interface, set LFSS_ENDPOINT and LFSS_TOKEN environment variables for authentication.")
@@ -30,10 +40,10 @@ def parse_arguments():
  sp_peers.add_argument('-i', '--incoming', action='store_true', help="List users that have access to you (rather than you have access to them")

  # upload
- sp_upload = sp.add_parser("upload", help="Upload file(s)")
+ sp_upload = sp.add_parser("upload", help="Upload a file or directory", aliases=["up"])
  sp_upload.add_argument("src", help="Source file or directory", type=str)
  sp_upload.add_argument("dst", help="Destination url path", type=str)
- sp_upload.add_argument("-v", "--verbose", action="store_true", help="Verbose output")
+ sp_upload.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
  sp_upload.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent uploads")
  sp_upload.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory upload")
  sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip", "skip-ahead"], default="abort", help="Conflict resolution")
@@ -41,21 +51,35 @@ def parse_arguments():
  sp_upload.add_argument("--retries", type=int, default=0, help="Number of retries")

  # download
- sp_download = sp.add_parser("download", help="Download file(s)")
+ sp_download = sp.add_parser("download", help="Download a file or directory", aliases=["down"])
  sp_download.add_argument("src", help="Source url path", type=str)
  sp_download.add_argument("dst", help="Destination file or directory", type=str)
- sp_download.add_argument("-v", "--verbose", action="store_true", help="Verbose output")
+ sp_download.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
  sp_download.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent downloads")
  sp_download.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory download")
- sp_download.add_argument("--overwrite", action="store_true", help="Overwrite existing files")
+ sp_download.add_argument("--conflict", choices=["overwrite", "skip"], default="abort", help="Conflict resolution, only works with file download")
  sp_download.add_argument("--retries", type=int, default=0, help="Number of retries")

  # query
- sp_query = sp.add_parser("query", help="Query file or directories metadata from the server")
- sp_query.add_argument("path", help="Path to query", nargs="*", type=str)
+ sp_query = sp.add_parser("info", help="Query file or directories metadata from the server", aliases=["i"])
+ sp_query.add_argument("path", help="Path to query", nargs="+", type=str)
+
+ # delete
+ sp_delete = sp.add_parser("delete", help="Delete files or directories", aliases=["del"])
+ sp_delete.add_argument("path", help="Path to delete", nargs="+", type=str)
+ sp_delete.add_argument("-y", "--yes", action="store_true", help="Confirm deletion without prompt")
+
+ # aggregate list
+ sp_list = sp.add_parser("list", help="Aggregately list files and directories of a given path", aliases=["ls"])
+ sp_list.add_argument("path", help="Path to list", type=str)
+ sp_list.add_argument("--offset", type=int, default=0, help="Offset of the list")
+ sp_list.add_argument("--limit", type=int, default=100, help="Limit of the list")
+ sp_list.add_argument("-l", "--long", action="store_true", help="Detailed list, including all metadata")
+ sp_list.add_argument("--order", "--order-by", type=str, help="Order of the list", default="", choices=typing.get_args(FileSortKey))
+ sp_list.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")

  # list directories
- sp_list_d = sp.add_parser("list-dirs", help="List directories of a given path")
+ sp_list_d = sp.add_parser("list-d", help="List directories of a given path", aliases=["lsd"])
  sp_list_d.add_argument("path", help="Path to list", type=str)
  sp_list_d.add_argument("--offset", type=int, default=0, help="Offset of the list")
  sp_list_d.add_argument("--limit", type=int, default=100, help="Limit of the list")
@@ -64,7 +88,7 @@ def parse_arguments():
  sp_list_d.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")

  # list files
- sp_list_f = sp.add_parser("list-files", help="List files of a given path")
+ sp_list_f = sp.add_parser("list-f", help="List files of a given path", aliases=["lsf"])
  sp_list_f.add_argument("path", help="Path to list", type=str)
  sp_list_f.add_argument("--offset", type=int, default=0, help="Offset of the list")
  sp_list_f.add_argument("--limit", type=int, default=100, help="Limit of the list")
@@ -99,12 +123,12 @@ def main():
  for i, u in enumerate(line_sep(users)):
  print(f"[{i+1}] {u.username} (id={u.id})")

- elif args.command == "upload":
+ elif args.command in ["upload", "up"]:
  src_path = Path(args.src)
  if src_path.is_dir():
  failed_upload = upload_directory(
  connector, args.src, args.dst,
- verbose=args.verbose,
+ verbose=not args.quiet,
  n_concurrent=args.jobs,
  n_retries=args.retries,
  interval=args.interval,
@@ -120,7 +144,7 @@ def main():
  connector,
  file_path = args.src,
  dst_url = args.dst,
- verbose=args.verbose,
+ verbose=not args.quiet,
  n_retries=args.retries,
  interval=args.interval,
  conflict=args.conflict,
@@ -129,16 +153,16 @@ def main():
  if not success:
  print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)

- elif args.command == "download":
+ elif args.command in ["download", "down"]:
  is_dir = args.src.endswith("/")
  if is_dir:
  failed_download = download_directory(
  connector, args.src, args.dst,
- verbose=args.verbose,
+ verbose=not args.quiet,
  n_concurrent=args.jobs,
  n_retries=args.retries,
  interval=args.interval,
- overwrite=args.overwrite
+ overwrite=args.conflict == "overwrite"
  )
  if failed_download:
  print("\033[91mFailed to download:\033[0m", file=sys.stderr)
@@ -149,7 +173,7 @@ def main():
  connector,
  src_url = args.src,
  file_path = args.dst,
- verbose=args.verbose,
+ verbose=not args.quiet,
  n_retries=args.retries,
  interval=args.interval,
  overwrite=args.overwrite
@@ -157,17 +181,50 @@ def main():
  if not success:
  print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)

- elif args.command == "query":
+ elif args.command in ["delete", "del"]:
+ if not args.yes:
+ print("You are about to delete the following paths:")
+ for path in args.path:
+ print("[D]" if path.endswith("/") else "[F]", path)
+ confirm = input("Are you sure? ([yes]/no): ")
+ if confirm.lower() not in ["", "y", "yes"]:
+ print("Aborted.")
+ exit(0)
  for path in args.path:
- with catch_request_error():
+ with catch_request_error(default_error_handler_dict(path)):
+ connector.delete(path)
+ print(f"\033[32mDeleted\033[0m ({path})")
+
+ elif args.command in ["info", "i"]:
+ for path in args.path:
+ with catch_request_error(default_error_handler_dict(path)):
  res = connector.get_meta(path)
  if res is None:
  print(f"\033[31mNot found\033[0m ({path})")
  else:
  print(res)

- elif args.command == "list-files":
- with catch_request_error():
+ elif args.command in ["ls", "list"]:
+ with catch_request_error(default_error_handler_dict(args.path)):
+ res = connector.list_path(
+ args.path,
+ offset=args.offset,
+ limit=args.limit,
+ order_by=args.order,
+ order_desc=args.reverse,
+ )
+ for i, d in enumerate(line_sep(res.dirs, end=False)):
+ d.url = decode_uri_components(d.url)
+ print(f"[d{i+1}] {d if args.long else d.url}")
+ for i, f in enumerate(line_sep(res.files)):
+ f.url = decode_uri_components(f.url)
+ print(f"[f{i+1}] {f if args.long else f.url}")
+
+ if len(res.dirs) + len(res.files) == args.limit:
+ print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more items.\033[0m")
+
+ elif args.command in ["lsf", "list-f"]:
+ with catch_request_error(default_error_handler_dict(args.path)):
  res = connector.list_files(
  args.path,
  offset=args.offset,
@@ -181,10 +238,10 @@ def main():
  print(f"[{i+1}] {f if args.long else f.url}")

  if len(res) == args.limit:
- print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files.")
+ print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files.\033[0m")

- elif args.command == "list-dirs":
- with catch_request_error():
+ elif args.command in ["lsd", "list-d"]:
+ with catch_request_error(default_error_handler_dict(args.path)):
  res = connector.list_dirs(
  args.path,
  offset=args.offset,
@@ -198,7 +255,7 @@ def main():
  print(f"[{i+1}] {d if args.long else d.url}")

  if len(res) == args.limit:
- print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories.")
+ print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories.\033[0m")

  else:
  raise NotImplementedError(f"Command {args.command} not implemented.")
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "lfss"
- version = "0.11.6"
+ version = "0.12.0"
  description = "Lite file storage service"
  authors = ["Li, Mengxun <mengxunli@whu.edu.cn>"]
  readme = "Readme.md"
@@ -1,40 +0,0 @@
-
- # Client-side CLI tools
-
- To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
- ```sh
- pip install requests
- pip install lfss --no-deps
- ```
-
- Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
- then you can use the following commands:
- ```sh
- # Check current user information
- lfss whoami
-
- # Query a path
- lfss query remote/file[/or_dir/]
-
- # List directories of a specified path
- lfss list-dirs remote/dir/
-
- # List files of a specified path,
- # with pagination and sorting
- lfss list-files --offset 0 --limit 100 --order access_time remote/dir/
-
- # Upload a file
- lfss upload local/file.txt remote/file.txt
-
- # Upload a directory, note the ending slashes
- lfss upload local/dir/ remote/dir/
-
- # Download a file
- lfss download remote/file.txt local/file.txt
-
- # Download a directory, with verbose output and 8 concurrent jobs
- # Overwrite existing files
- lfss download -v -j 8 --conflict overwrite remote/dir/ local/dir/
- ```
-
- More commands can be found using `lfss-cli --help`.