lfss 0.11.6__tar.gz → 0.12.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lfss-0.11.6 → lfss-0.12.1}/PKG-INFO +1 -1
- lfss-0.12.1/docs/Client.md +37 -0
- {lfss-0.11.6 → lfss-0.12.1}/docs/changelog.md +12 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/api/__init__.py +2 -2
- {lfss-0.11.6 → lfss-0.12.1}/lfss/api/connector.py +89 -15
- lfss-0.12.1/lfss/cli/__init__.py +28 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/cli.py +134 -33
- lfss-0.12.1/lfss/cli/cli_lib.py +64 -0
- {lfss-0.11.6 → lfss-0.12.1}/pyproject.toml +1 -1
- lfss-0.11.6/docs/Client.md +0 -40
- lfss-0.11.6/lfss/cli/__init__.py +0 -27
- {lfss-0.11.6 → lfss-0.12.1}/Readme.md +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/docs/Enviroment_variables.md +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/docs/Known_issues.md +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/docs/Permission.md +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/docs/Webdav.md +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/api.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/index.html +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/info.css +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/info.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/login.css +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/login.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/popup.css +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/popup.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/scripts.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/state.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/styles.css +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/thumb.css +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/thumb.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/frontend/utils.js +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/balance.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/log.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/panel.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/serve.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/user.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/cli/vacuum.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/__init__.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/bounded_pool.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/config.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/connection_pool.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/database.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/datatype.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/error.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/log.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/thumb.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/eng/utils.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/sql/init.sql +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/sql/pragma.sql +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/app.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/app_base.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/app_dav.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/app_native.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/common_impl.py +0 -0
- {lfss-0.11.6 → lfss-0.12.1}/lfss/svc/request_log.py +0 -0
lfss-0.12.1/docs/Client.md (new file)

@@ -0,0 +1,37 @@
+
+# Client-side CLI tools
+
+To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
+```sh
+pip install requests
+pip install lfss --no-deps
+```
+
+Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
+then you can use the following commands:
+```sh
+# Check current user information
+lfss whoami
+
+# Query a path
+lfss i remote/file[/or_dir/]
+
+# List a specified path,
+# with pagination and sorting
+lfss ls remote/dir/ --offset 0 --limit 100 --order access_time
+
+# Upload a file
+lfss up local/file.txt remote/file.txt
+
+# Upload a directory, note the ending slashes
+lfss up local/dir/ remote/dir/
+
+# Download a file
+lfss down remote/file.txt local/file.txt
+
+# Download a directory, with 8 concurrent jobs
+# Overwrite existing files
+lfss down remote/dir/ local/dir/ -j 8 --conflict overwrite
+```
+
+More commands can be found using `lfss --help`.
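For context, a minimal sketch of driving the same operations through the Python API that the new CLI wraps. This is not part of the package docs: it assumes `Connector()` picks up `LFSS_ENDPOINT`/`LFSS_TOKEN` from the environment (as the defaults in `connector.py` suggest), that the local and remote paths are placeholders, and that `upload_file`/`download_file` return a `(success, message)` pair, which is only inferred from how `cli.py` unpacks them.

```python
from lfss.api import Connector, upload_file, download_file

conn = Connector()  # endpoint/token default to LFSS_ENDPOINT / LFSS_TOKEN

# mirrors `lfss up local/file.txt remote/file.txt --conflict overwrite`
ok, msg = upload_file(conn, file_path="local/file.txt", dst_url="remote/file.txt",
                      conflict="overwrite", verbose=True)
if not ok:
    print("upload failed:", msg)

# mirrors `lfss down remote/file.txt local/copy.txt`
ok, msg = download_file(conn, src_url="remote/file.txt", file_path="local/copy.txt",
                        overwrite=True, verbose=True)
if not ok:
    print("download failed:", msg)
```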
{lfss-0.11.6 → lfss-0.12.1}/docs/changelog.md

@@ -1,3 +1,15 @@
+## 0.12
+
+### 0.12.1
+- Add `cat` command
+- Use unicode icons for CLI list command
+
+### 0.12.0
+- Change default script to client CLI
+- Client CLI default to verbose output
+- Client CLI subcommand rename and alias
+- Add delete path and more error handling for client CLI
+
 ## 0.11

 ### 0.11.6
{lfss-0.11.6 → lfss-0.12.1}/lfss/api/__init__.py

@@ -75,7 +75,7 @@ def upload_directory(
         this_count = _counter
         dst_path = f"{path}{os.path.relpath(file_path, directory)}"
         if verbose:
-            print(f"[{this_count}]
+            print(f"[{this_count}] {file_path} -> {dst_path}")

         if not (res:=upload_file(
             c, file_path, dst_path,

@@ -178,7 +178,7 @@ def download_directory(
         this_count = _counter
         dst_path = f"{directory}{os.path.relpath(decode_uri_components(src_url), decode_uri_components(src_path))}"
         if verbose:
-            print(f"[{this_count}/{file_count}]
+            print(f"[{this_count}/{file_count}] {src_url} -> {dst_path}")

         if not (res:=download_file(
             c, src_url, dst_path,
{lfss-0.11.6 → lfss-0.12.1}/lfss/api/connector.py

@@ -1,11 +1,12 @@
 from __future__ import annotations
 from typing import Optional, Literal
 from collections.abc import Iterator
-import os
+import os
 import requests
 import requests.adapters
 import urllib.parse
 from tempfile import SpooledTemporaryFile
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from lfss.eng.error import PathNotFoundError
 from lfss.eng.datatype import (
     FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents, AccessLevel,

@@ -17,6 +18,13 @@ _default_endpoint = os.environ.get('LFSS_ENDPOINT', 'http://localhost:8000')
 _default_token = os.environ.get('LFSS_TOKEN', '')
 num_t = float | int

+def _p(x: str) -> str:
+    if x == '/':
+        return x
+    if x.startswith('/'):
+        x = x[1:]
+    return x
+
 class Connector:
     class Session:
         def __init__(

@@ -99,6 +107,7 @@ class Connector:
     def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a file to the specified path."""
         assert isinstance(file_data, bytes), "file_data must be bytes"
+        path = _p(path)

         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':

@@ -123,6 +132,7 @@ class Connector:
         using the POST method, with form-data/multipart.
         file can be a path to a file on disk, or bytes.
         """
+        path = _p(path)

         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':

@@ -155,6 +165,7 @@ class Connector:
         """Uploads a JSON file to the specified path."""
         assert path.endswith('.json'), "Path must end with .json"
         assert isinstance(data, dict), "data must be a dict"
+        path = _p(path)

         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':

@@ -184,6 +195,7 @@ class Connector:

     def get(self, path: str) -> Optional[bytes]:
         """Downloads a file from the specified path."""
+        path = _p(path)
         response = self._get(path)
         if response is None: return None
         return response.content

@@ -193,19 +205,22 @@ class Connector:
         Downloads a partial file from the specified path.
         start and end are the byte offsets, both inclusive.
         """
+        path = _p(path)
         response = self._fetch_factory('GET', path, extra_headers={
             'Range': f"bytes={range_start if range_start >= 0 else ''}-{range_end if range_end >= 0 else ''}"
         })()
         if response is None: return None
         return response.content

-    def get_stream(self, path: str) -> Iterator[bytes]:
+    def get_stream(self, path: str, chunk_size = 1024) -> Iterator[bytes]:
         """Downloads a file from the specified path, will raise PathNotFoundError if path not found."""
+        path = _p(path)
         response = self._get(path, stream=True)
         if response is None: raise PathNotFoundError("Path not found: " + path)
-        return response.iter_content(chunk_size
+        return response.iter_content(chunk_size)

     def get_json(self, path: str) -> Optional[dict]:
+        path = _p(path)
         response = self._get(path)
         if response is None: return None
         assert response.headers['Content-Type'] == 'application/json'

@@ -218,16 +233,18 @@ class Connector:
         """
         response = self._fetch_factory(
             'GET', '_api/get-multiple',
-            {'path': paths, "skip_content": skip_content}
+            {'path': [_p(p) for p in paths], "skip_content": skip_content}
         )()
         return response.json()

     def delete(self, path: str):
         """Deletes the file at the specified path."""
+        path = _p(path)
         self._fetch_factory('DELETE', path)()

     def get_meta(self, path: str) -> Optional[FileRecord | DirectoryRecord]:
         """Gets the metadata for the file at the specified path."""
+        path = _p(path)
         try:
             response = self._fetch_factory('GET', '_api/meta', {'path': path})()
             if path.endswith('/'):

@@ -242,19 +259,9 @@ class Connector:
     def get_fmeta(self, path: str) -> Optional[FileRecord]: assert (f:=self.get_meta(path)) is None or isinstance(f, FileRecord); return f
     def get_dmeta(self, path: str) -> Optional[DirectoryRecord]: assert (d:=self.get_meta(path)) is None or isinstance(d, DirectoryRecord); return d

-    def list_path(self, path: str) -> PathContents:
-        """
-        shorthand list with limited options,
-        for large directories / more options, use list_files and list_dirs instead.
-        """
-        assert path.endswith('/')
-        response = self._fetch_factory('GET', path)()
-        dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
-        files = [FileRecord(**f) for f in response.json()['files']]
-        return PathContents(dirs=dirs, files=files)
-
     def count_files(self, path: str, flat: bool = False) -> int:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/count-files', {'path': path, 'flat': flat})()
         return response.json()['count']

@@ -264,6 +271,7 @@ class Connector:
         flat: bool = False
     ) -> list[FileRecord]:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', "_api/list-files", {
             'path': path,
             'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'flat': flat

@@ -272,6 +280,7 @@ class Connector:

     def count_dirs(self, path: str) -> int:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/count-dirs', {'path': path})()
         return response.json()['count']

@@ -281,32 +290,97 @@ class Connector:
         skim: bool = True
     ) -> list[DirectoryRecord]:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', "_api/list-dirs", {
             'path': path,
             'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'skim': skim
         })()
         return [DirectoryRecord(**d) for d in response.json()]
+
+    def list_path(
+        self, path: str, offset: int = 0, limit: int = 1000,
+        order_by: FileSortKey = '', order_desc: bool = False,
+        _workers: int = 2
+    ) -> PathContents:
+        """ Aggregately lists both files and directories under the given path. """
+        assert path.endswith('/')
+        path = _p(path)
+        if path == '/':
+            # handle root path separately
+            # TODO: change later
+            response = self._fetch_factory('GET', path)()
+            dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
+            files = [FileRecord(**f) for f in response.json()['files']]
+            return PathContents(dirs=dirs, files=files)
+
+        dirs: list[DirectoryRecord] = []
+        files: list[FileRecord] = []
+        with ThreadPoolExecutor(max_workers=_workers) as executor:
+            count_futures = {
+                executor.submit(self.count_dirs, path): 'dirs',
+                executor.submit(self.count_files, path, flat=False): 'files'
+            }
+            dir_count = 0
+            file_count = 0
+            for future in as_completed(count_futures):
+                if count_futures[future] == 'dirs':
+                    dir_count = future.result()
+                else:
+                    file_count = future.result()
+            dir_offset = offset
+            dir_limit = min(limit, max(0, dir_count - dir_offset))
+            file_offset = max(0, offset - dir_count)
+            file_limit = min(limit - dir_limit, max(0, file_count - file_offset))
+
+            dir_order_by = 'dirname' if order_by == 'url' else ''
+            file_order_by = order_by
+
+            def fetch_dirs():
+                nonlocal dirs
+                if dir_limit > 0:
+                    dirs = self.list_dirs(
+                        path, offset=dir_offset, limit=dir_limit,
+                        order_by=dir_order_by, order_desc=order_desc
+                    )
+            def fetch_files():
+                nonlocal files
+                if file_limit > 0:
+                    files = self.list_files(
+                        path, offset=file_offset, limit=file_limit,
+                        order_by=file_order_by, order_desc=order_desc, flat=False
+                    )
+            futures = [
+                executor.submit(fetch_dirs),
+                executor.submit(fetch_files)
+            ]
+            for future in as_completed(futures):
+                future.result()
+        return PathContents(dirs=dirs, files=files)

     def set_file_permission(self, path: str, permission: int | FileReadPermission):
         """Sets the file permission for the specified path."""
+        path = _p(path)
         self._fetch_factory('POST', '_api/meta', {'path': path, 'perm': int(permission)})(
             headers={'Content-Type': 'application/www-form-urlencoded'}
         )

     def move(self, path: str, new_path: str):
         """Move file or directory to a new path."""
+        path = _p(path); new_path = _p(new_path)
         self._fetch_factory('POST', '_api/meta', {'path': path, 'new_path': new_path})(
             headers = {'Content-Type': 'application/www-form-urlencoded'}
         )

     def copy(self, src: str, dst: str):
         """Copy file from src to dst."""
+        src = _p(src); dst = _p(dst)
         self._fetch_factory('POST', '_api/copy', {'src': src, 'dst': dst})(
             headers = {'Content-Type': 'application/www-form-urlencoded'}
         )

     def bundle(self, path: str) -> Iterator[bytes]:
         """Bundle a path into a zip file."""
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/bundle', {'path': path})(
             headers = {'Content-Type': 'application/www-form-urlencoded'},
             stream = True
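The offset/limit bookkeeping in the new `list_path` lists directories first and fills the remainder of the requested page with files. Below is a small self-contained sketch of that arithmetic with concrete numbers; `split_page` is a hypothetical helper for illustration, not part of the package.

```python
def split_page(offset: int, limit: int, dir_count: int, file_count: int):
    # same expressions as in list_path: directories are consumed first,
    # files fill whatever is left of the requested page
    dir_offset = offset
    dir_limit = min(limit, max(0, dir_count - dir_offset))
    file_offset = max(0, offset - dir_count)
    file_limit = min(limit - dir_limit, max(0, file_count - file_offset))
    return (dir_offset, dir_limit), (file_offset, file_limit)

# 3 directories and 10 files, paged 5 at a time:
assert split_page(0, 5, 3, 10) == ((0, 3), (0, 2))  # page 1: all 3 dirs + first 2 files
assert split_page(5, 5, 3, 10) == ((5, 0), (2, 5))  # page 2: files 3..7 only
```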
lfss-0.12.1/lfss/cli/__init__.py (new file)

@@ -0,0 +1,28 @@
+from contextlib import contextmanager
+from typing import Iterable, TypeVar, Generator, Callable, Optional
+import requests, os
+
+@contextmanager
+def catch_request_error(error_code_handler: Optional[ dict[int, Callable[[requests.Response], None]] ] = None):
+    try:
+        yield
+    except requests.RequestException as e:
+        if error_code_handler is not None:
+            if e.response is not None and e.response.status_code in error_code_handler:
+                error_code_handler[e.response.status_code](e.response)
+                return
+        print(f"\033[31m[Request error]: {e}\033[0m")
+        if e.response is not None:
+            print(f"\033[91m[Error message]: {e.response.text}\033[0m")
+
+T = TypeVar('T')
+def line_sep(iter: Iterable[T], enable=True, start=True, end=True, middle=False, color="\033[90m") -> Generator[T, None, None]:
+    screen_width = os.get_terminal_size().columns
+    def print_ln():
+        if enable: print(color + "-" * screen_width + "\033[0m")
+
+    if start: print_ln()
+    for i, line in enumerate(iter):
+        if i > 0 and middle: print_ln()
+        yield line
+    if end: print_ln()
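A short usage sketch for the extended `catch_request_error`: handlers are keyed by HTTP status code, and any unhandled code falls through to the generic error printout above. This assumes `Connector()` is configured via `LFSS_ENDPOINT`/`LFSS_TOKEN`, that `remote/old-file.txt` is a placeholder path, and that `Connector.delete` surfaces HTTP errors as `requests` exceptions, as the CLI's use of the same pattern suggests.

```python
import sys
from lfss.api import Connector
from lfss.cli import catch_request_error

conn = Connector()  # endpoint/token taken from the environment

# 404 gets a specific handler; any other status falls through to the
# generic "[Request error]" / "[Error message]" printout.
with catch_request_error({404: lambda resp: print("nothing to delete", file=sys.stderr)}):
    conn.delete("remote/old-file.txt")
```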
{lfss-0.11.6 → lfss-0.12.1}/lfss/cli/cli.py

@@ -1,9 +1,15 @@
 from pathlib import Path
 import argparse, typing, sys
 from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
-from lfss.eng.datatype import
+from lfss.eng.datatype import (
+    FileReadPermission, AccessLevel,
+    FileSortKey, DirSortKey,
+    FileRecord, DirectoryRecord, PathContents
+)
 from lfss.eng.utils import decode_uri_components, fmt_storage_size
+
 from . import catch_request_error, line_sep
+from .cli_lib import mimetype_unicode, stream_text

 def parse_permission(s: str) -> FileReadPermission:
     for p in FileReadPermission:

@@ -15,6 +21,53 @@ def parse_access_level(s: str) -> AccessLevel:
         if p.name.lower() == s.lower():
             return p
     raise ValueError(f"Invalid access level {s}")
+def default_error_handler_dict(path: str):
+    return {
+        401: lambda _: print(f"\033[31mUnauthorized\033[0m ({path})", file=sys.stderr),
+        403: lambda _: print(f"\033[31mForbidden\033[0m ({path})", file=sys.stderr),
+        404: lambda _: print(f"\033[31mNot found\033[0m ({path})", file=sys.stderr),
+        409: lambda _: print(f"\033[31mConflict\033[0m ({path})", file=sys.stderr),
+    }
+def print_path_list(
+    path_list: list[FileRecord] | list[DirectoryRecord] | PathContents,
+    detailed: bool = False
+):
+    dirs: list[DirectoryRecord]
+    files: list[FileRecord]
+    if isinstance(path_list, PathContents):
+        dirs = path_list.dirs
+        files = path_list.files
+    else:
+        dirs = [p for p in path_list if isinstance(p, DirectoryRecord)]
+        files = [p for p in path_list if isinstance(p, FileRecord)]
+    # check if terminal supports unicode
+    supports_unicode = sys.stdout.encoding.lower().startswith("utf")
+    def print_ln(r: DirectoryRecord | FileRecord):
+        nonlocal detailed, supports_unicode
+        match (r, supports_unicode):
+            case (DirectoryRecord(), True):
+                print(mimetype_unicode(r), end=" ")
+            case (DirectoryRecord(), False):
+                print("[D]", end=" ")
+            case (FileRecord(), True):
+                print(mimetype_unicode(r), end=" ")
+            case (FileRecord(), False):
+                print("[F]", end=" ")
+            case _:
+                print("[?]", end=" ")
+        print(decode_uri_components(r.url), end="")
+        if detailed:
+            if isinstance(r, FileRecord):
+                print(f" | {fmt_storage_size(r.file_size)}, permission={r.permission.name}, created={r.create_time}, accessed={r.access_time}")
+            else:
+                print()
+        else:
+            print()
+
+    for d in line_sep(dirs, end=False):
+        print_ln(d)
+    for f in line_sep(files, start=False):
+        print_ln(f)

 def parse_arguments():
     parser = argparse.ArgumentParser(description="Client-side command line interface, set LFSS_ENDPOINT and LFSS_TOKEN environment variables for authentication.")

@@ -30,10 +83,10 @@ def parse_arguments():
     sp_peers.add_argument('-i', '--incoming', action='store_true', help="List users that have access to you (rather than you have access to them")

     # upload
-    sp_upload = sp.add_parser("upload", help="Upload file
+    sp_upload = sp.add_parser("upload", help="Upload a file or directory", aliases=["up"])
     sp_upload.add_argument("src", help="Source file or directory", type=str)
     sp_upload.add_argument("dst", help="Destination url path", type=str)
-    sp_upload.add_argument("-
+    sp_upload.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
     sp_upload.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent uploads")
     sp_upload.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory upload")
     sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip", "skip-ahead"], default="abort", help="Conflict resolution")

@@ -41,21 +94,35 @@ def parse_arguments():
     sp_upload.add_argument("--retries", type=int, default=0, help="Number of retries")

     # download
-    sp_download = sp.add_parser("download", help="Download file
+    sp_download = sp.add_parser("download", help="Download a file or directory", aliases=["down"])
     sp_download.add_argument("src", help="Source url path", type=str)
     sp_download.add_argument("dst", help="Destination file or directory", type=str)
-    sp_download.add_argument("-
+    sp_download.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
     sp_download.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent downloads")
     sp_download.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory download")
-    sp_download.add_argument("--overwrite",
+    sp_download.add_argument("--conflict", choices=["overwrite", "skip"], default="abort", help="Conflict resolution, only works with file download")
     sp_download.add_argument("--retries", type=int, default=0, help="Number of retries")

     # query
-    sp_query = sp.add_parser("
-    sp_query.add_argument("path", help="Path to query", nargs="
+    sp_query = sp.add_parser("info", help="Query file or directories metadata from the server", aliases=["i"])
+    sp_query.add_argument("path", help="Path to query", nargs="+", type=str)
+
+    # delete
+    sp_delete = sp.add_parser("delete", help="Delete files or directories", aliases=["del"])
+    sp_delete.add_argument("path", help="Path to delete", nargs="+", type=str)
+    sp_delete.add_argument("-y", "--yes", action="store_true", help="Confirm deletion without prompt")
+
+    # aggregate list
+    sp_list = sp.add_parser("list", help="Aggregately list files and directories of a given path", aliases=["ls"])
+    sp_list.add_argument("path", help="Path to list", type=str)
+    sp_list.add_argument("--offset", type=int, default=0, help="Offset of the list")
+    sp_list.add_argument("--limit", type=int, default=100, help="Limit of the list")
+    sp_list.add_argument("-l", "--long", action="store_true", help="Detailed list, including all metadata")
+    sp_list.add_argument("--order", "--order-by", type=str, help="Order of the list", default="", choices=typing.get_args(FileSortKey))
+    sp_list.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")

     # list directories
-    sp_list_d = sp.add_parser("list-
+    sp_list_d = sp.add_parser("list-d", help="List directories of a given path", aliases=["lsd"])
     sp_list_d.add_argument("path", help="Path to list", type=str)
     sp_list_d.add_argument("--offset", type=int, default=0, help="Offset of the list")
     sp_list_d.add_argument("--limit", type=int, default=100, help="Limit of the list")

@@ -64,7 +131,7 @@ def parse_arguments():
     sp_list_d.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")

     # list files
-    sp_list_f = sp.add_parser("list-
+    sp_list_f = sp.add_parser("list-f", help="List files of a given path", aliases=["lsf"])
     sp_list_f.add_argument("path", help="Path to list", type=str)
     sp_list_f.add_argument("--offset", type=int, default=0, help="Offset of the list")
     sp_list_f.add_argument("--limit", type=int, default=100, help="Limit of the list")

@@ -73,6 +140,10 @@ def parse_arguments():
     sp_list_f.add_argument("--order", "--order-by", type=str, help="Order of the list", default="", choices=typing.get_args(FileSortKey))
     sp_list_f.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")

+    # show content
+    sp_show = sp.add_parser("concatenate", help="Concatenate and print files", aliases=["cat"])
+    sp_show.add_argument("path", help="Path to the text files", type=str, nargs="+")
+    sp_show.add_argument("-e", "--encoding", type=str, default="utf-8", help="Text file encoding, default utf-8")
     return parser.parse_args()

 def main():

@@ -99,12 +170,12 @@ def main():
         for i, u in enumerate(line_sep(users)):
             print(f"[{i+1}] {u.username} (id={u.id})")

-    elif args.command
+    elif args.command in ["upload", "up"]:
         src_path = Path(args.src)
         if src_path.is_dir():
             failed_upload = upload_directory(
                 connector, args.src, args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_concurrent=args.jobs,
                 n_retries=args.retries,
                 interval=args.interval,

@@ -120,7 +191,7 @@ def main():
                 connector,
                 file_path = args.src,
                 dst_url = args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_retries=args.retries,
                 interval=args.interval,
                 conflict=args.conflict,

@@ -129,16 +200,16 @@ def main():
             if not success:
                 print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)

-    elif args.command
+    elif args.command in ["download", "down"]:
         is_dir = args.src.endswith("/")
         if is_dir:
             failed_download = download_directory(
                 connector, args.src, args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_concurrent=args.jobs,
                 n_retries=args.retries,
                 interval=args.interval,
-                overwrite=args.overwrite
+                overwrite=args.conflict == "overwrite"
             )
             if failed_download:
                 print("\033[91mFailed to download:\033[0m", file=sys.stderr)

@@ -149,7 +220,7 @@ def main():
                 connector,
                 src_url = args.src,
                 file_path = args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_retries=args.retries,
                 interval=args.interval,
                 overwrite=args.overwrite

@@ -157,17 +228,44 @@ def main():
             if not success:
                 print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)

-    elif args.command
+    elif args.command in ["delete", "del"]:
+        if not args.yes:
+            print("You are about to delete the following paths:")
+            for path in args.path:
+                print("[D]" if path.endswith("/") else "[F]", path)
+            confirm = input("Are you sure? ([yes]/no): ")
+            if confirm.lower() not in ["", "y", "yes"]:
+                print("Aborted.")
+                exit(0)
         for path in args.path:
-            with catch_request_error():
+            with catch_request_error(default_error_handler_dict(path)):
+                connector.delete(path)
+                print(f"\033[32mDeleted\033[0m ({path})")
+
+    elif args.command in ["info", "i"]:
+        for path in args.path:
+            with catch_request_error(default_error_handler_dict(path)):
                 res = connector.get_meta(path)
                 if res is None:
                     print(f"\033[31mNot found\033[0m ({path})")
                 else:
                     print(res)

-    elif args.command
-        with catch_request_error():
+    elif args.command in ["ls", "list"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
+            res = connector.list_path(
+                args.path,
+                offset=args.offset,
+                limit=args.limit,
+                order_by=args.order,
+                order_desc=args.reverse,
+            )
+            print_path_list(res, detailed=args.long)
+            if len(res.dirs) + len(res.files) == args.limit:
+                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more items.\033[0m")
+
+    elif args.command in ["lsf", "list-f"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
             res = connector.list_files(
                 args.path,
                 offset=args.offset,

@@ -176,15 +274,12 @@ def main():
                 order_by=args.order,
                 order_desc=args.reverse,
             )
-
-            f.url = decode_uri_components(f.url)
-            print(f"[{i+1}] {f if args.long else f.url}")
-
+            print_path_list(res, detailed=args.long)
             if len(res) == args.limit:
-                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files
+                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files.\033[0m")

-    elif args.command
-        with catch_request_error():
+    elif args.command in ["lsd", "list-d"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
             res = connector.list_dirs(
                 args.path,
                 offset=args.offset,

@@ -193,12 +288,18 @@ def main():
                 order_by=args.order,
                 order_desc=args.reverse,
             )
-
-            d.url = decode_uri_components(d.url)
-            print(f"[{i+1}] {d if args.long else d.url}")
-
+            print_path_list(res, detailed=args.long)
             if len(res) == args.limit:
-                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories
+                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories.\033[0m")
+
+    elif args.command in ["cat", "concatenate"]:
+        for _p in args.path:
+            with catch_request_error(default_error_handler_dict(_p)):
+                try:
+                    for chunk in stream_text(connector, _p, encoding=args.encoding):
+                        print(chunk, end="")
+                except (FileNotFoundError, ValueError) as e:
+                    print(f"\033[31m{e}\033[0m", file=sys.stderr)

     else:
         raise NotImplementedError(f"Command {args.command} not implemented.")
lfss-0.12.1/lfss/cli/cli_lib.py (new file)

@@ -0,0 +1,64 @@
+
+from ..api.connector import Connector
+from ..eng.datatype import DirectoryRecord, FileRecord
+
+def mimetype_unicode(r: DirectoryRecord | FileRecord):
+    if isinstance(r, DirectoryRecord):
+        return "📁"
+    if r.mime_type in ["application/pdf", "application/x-pdf"]:
+        return "📕"
+    elif r.mime_type.startswith("image/"):
+        return "🖼️"
+    elif r.mime_type.startswith("video/"):
+        return "🎞️"
+    elif r.mime_type.startswith("audio/"):
+        return "🎵"
+    elif r.mime_type in ["application/zip", "application/x-tar", "application/gzip", "application/x-7z-compressed"]:
+        return "📦"
+    elif r.mime_type in ["application/vnd.ms-excel", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"]:
+        return "📊"
+    elif r.mime_type in ["application/x-msdownload", "application/x-executable", "application/x-mach-binary", "application/x-elf"]:
+        return "💻"
+    elif r.mime_type in ["application/vnd.ms-powerpoint", "application/vnd.openxmlformats-officedocument.presentationml.presentation"]:
+        return "📈"
+    elif r.mime_type in set([
+        "text/html", "application/xhtml+xml", "application/xml", "text/css", "text/x-scss", "application/javascript", "text/javascript",
+        "application/json", "text/x-yaml", "text/x-markdown", "application/wasm",
+        "text/x-ruby", "application/x-ruby", "text/x-perl", "application/x-lisp",
+        "text/x-haskell", "text/x-lua", "application/x-tcl",
+        "text/x-python", "text/x-java-source", "text/x-go", "application/x-rust", "text/x-asm",
+        "application/sql", "text/x-c", "text/x-c++", "text/x-csharp",
+        "application/x-httpd-php", "application/x-sh", "application/x-shellscript",
+        "application/x-latex", "application/x-tex",
+    ]):
+        return "👨💻"
+    elif r.mime_type.startswith("text/"):
+        return "📃"
+    return "📄"
+
+def stream_text(
+    conn: Connector,
+    path: str,
+    encoding="utf-8",
+    chunk_size=1024 * 8,
+):
+    """
+    Stream text content of a file from the server.
+    Raise FileNotFoundError if the file does not exist.
+    Raise ValueError if the file size exceeds MAX_TEXT_SIZE.
+
+    Yields str chunks.
+    """
+    MAX_TEXT_SIZE = 100 * 1024 * 1024   # 100 MB
+    r = conn.get_fmeta(path)
+    if r is None:
+        raise FileNotFoundError(f"File not found: {path}")
+    if r.file_size > MAX_TEXT_SIZE:
+        raise ValueError(f"File size {r.file_size} exceeds maximum text size {MAX_TEXT_SIZE}")
+    ss = conn.get_stream(r.url, chunk_size=chunk_size)
+    total_read = 0
+    for chunk in ss:
+        total_read += len(chunk)
+        if total_read > MAX_TEXT_SIZE:
+            raise ValueError(f"File size exceeds maximum text size {MAX_TEXT_SIZE}")
+        yield chunk.decode(encoding, errors='replace')   # decode bytes to str, replace errors
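A sketch of the `cat` flow outside of argparse, using the new `stream_text` helper directly. It assumes `Connector()` is configured via the `LFSS_ENDPOINT`/`LFSS_TOKEN` environment variables and that `remote/notes.txt` is a placeholder path; `stream_text` supplies the size guard and decode-with-replacement behaviour shown above.

```python
import sys
from lfss.api import Connector
from lfss.cli.cli_lib import stream_text

conn = Connector()  # endpoint/token taken from the environment

try:
    # stream a remote text file to stdout, chunk by chunk
    for chunk in stream_text(conn, "remote/notes.txt", encoding="utf-8"):
        print(chunk, end="")
except (FileNotFoundError, ValueError) as e:
    print(e, file=sys.stderr)
```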
lfss-0.11.6/docs/Client.md DELETED

@@ -1,40 +0,0 @@
-
-# Client-side CLI tools
-
-To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
-```sh
-pip install requests
-pip install lfss --no-deps
-```
-
-Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
-then you can use the following commands:
-```sh
-# Check current user information
-lfss whoami
-
-# Query a path
-lfss query remote/file[/or_dir/]
-
-# List directories of a specified path
-lfss list-dirs remote/dir/
-
-# List files of a specified path,
-# with pagination and sorting
-lfss list-files --offset 0 --limit 100 --order access_time remote/dir/
-
-# Upload a file
-lfss upload local/file.txt remote/file.txt
-
-# Upload a directory, note the ending slashes
-lfss upload local/dir/ remote/dir/
-
-# Download a file
-lfss download remote/file.txt local/file.txt
-
-# Download a directory, with verbose output and 8 concurrent jobs
-# Overwrite existing files
-lfss download -v -j 8 --conflict overwrite remote/dir/ local/dir/
-```
-
-More commands can be found using `lfss-cli --help`.
lfss-0.11.6/lfss/cli/__init__.py DELETED

@@ -1,27 +0,0 @@
-from contextlib import contextmanager
-from typing import Iterable, TypeVar, Generator
-import requests, os
-
-@contextmanager
-def catch_request_error():
-    try:
-        yield
-    except requests.RequestException as e:
-        print(f"\033[31m[Request error]: {e}\033[0m")
-        if e.response is not None:
-            print(f"\033[91m[Error message]: {e.response.text}\033[0m")
-
-T = TypeVar('T')
-def line_sep(iter: Iterable[T], enable=True, start=True, end=True, color="\033[90m") -> Generator[T, None, None]:
-    screen_width = os.get_terminal_size().columns
-    def print_ln():
-        print(color + "-" * screen_width + "\033[0m")
-
-    if start and enable:
-        print_ln()
-    for i, line in enumerate(iter):
-        if enable and i > 0:
-            print_ln()
-        yield line
-    if end and enable:
-        print_ln()
The remaining 43 files are unchanged between 0.11.6 and 0.12.1 (listed above with +0 -0).