lfss 0.11.5__tar.gz → 0.12.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lfss-0.11.5 → lfss-0.12.0}/PKG-INFO +2 -2
- lfss-0.12.0/docs/Client.md +37 -0
- {lfss-0.11.5 → lfss-0.12.0}/docs/changelog.md +13 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/api.js +1 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/scripts.js +2 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/api/__init__.py +2 -2
- {lfss-0.11.5 → lfss-0.12.0}/lfss/api/connector.py +95 -15
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/__init__.py +10 -6
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/cli.py +118 -27
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/user.py +6 -3
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/database.py +16 -10
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/app_native.py +11 -1
- {lfss-0.11.5 → lfss-0.12.0}/pyproject.toml +2 -2
- lfss-0.11.5/docs/Client.md +0 -37
- {lfss-0.11.5 → lfss-0.12.0}/Readme.md +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/docs/Enviroment_variables.md +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/docs/Known_issues.md +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/docs/Permission.md +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/docs/Webdav.md +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/index.html +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/info.css +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/info.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/login.css +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/login.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/popup.css +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/popup.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/state.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/styles.css +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/thumb.css +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/thumb.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/frontend/utils.js +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/balance.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/log.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/panel.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/serve.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/cli/vacuum.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/__init__.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/bounded_pool.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/config.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/connection_pool.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/datatype.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/error.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/log.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/thumb.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/eng/utils.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/sql/init.sql +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/sql/pragma.sql +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/app.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/app_base.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/app_dav.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/common_impl.py +0 -0
- {lfss-0.11.5 → lfss-0.12.0}/lfss/svc/request_log.py +0 -0
lfss-0.12.0/docs/Client.md
ADDED
@@ -0,0 +1,37 @@
+
+# Client-side CLI tools
+
+To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
+```sh
+pip install requests
+pip install lfss --no-deps
+```
+
+Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
+then you can use the following commands:
+```sh
+# Check current user information
+lfss whoami
+
+# Query a path
+lfss i remote/file[/or_dir/]
+
+# List a specified path,
+# with pagination and sorting
+lfss ls remote/dir/ --offset 0 --limit 100 --order access_time
+
+# Upload a file
+lfss up local/file.txt remote/file.txt
+
+# Upload a directory, note the ending slashes
+lfss up local/dir/ remote/dir/
+
+# Download a file
+lfss down remote/file.txt local/file.txt
+
+# Download a directory, with 8 concurrent jobs
+# Overwrite existing files
+lfss down remote/dir/ local/dir/ -j 8 --conflict overwrite
+```
+
+More commands can be found using `lfss --help`.
{lfss-0.11.5 → lfss-0.12.0}/docs/changelog.md
@@ -1,5 +1,18 @@
+## 0.12
+
+### 0.12.0
+- Change default script to client CLI
+- Client CLI default to verbose output
+- Client CLI subcommand rename and alias
+- Add delete path and more error handling for client CLI
+
 ## 0.11
 
+### 0.11.6
+- Hint copy and move success for frontend.
+- Add query user info and list peers api.
+- Add user with random password if not specified.
+
 ### 0.11.5
 - Script entry default to client CLI.
 - Fix single file download name deduce with decoding.
{lfss-0.11.5 → lfss-0.12.0}/frontend/api.js
@@ -249,6 +249,7 @@ export default class Connector {
     /**
      * @param {string} path - the path to the file directory, should ends with '/'
      * @returns {Promise<PathListResponse>} - the promise of the request
+     * NOTE: will deprecated in future
      */
     async listPath(path){
         path = this._sanitizeDirPath(path);
{lfss-0.11.5 → lfss-0.12.0}/frontend/scripts.js
@@ -347,6 +347,7 @@ async function refreshFileList(){
         console.log("Moving", dirurl, "to", dstPath);
         conn.move(dirurl, dstPath)
             .then(() => {
+                showPopup('Successfully moved path.', {level: 'success', timeout: 3000});
                 refreshFileList();
             },
             (err) => {
@@ -371,6 +372,7 @@ async function refreshFileList(){
         console.log("Copying", dirurl, "to", dstPath);
         conn.copy(dirurl, dstPath)
             .then(() => {
+                showPopup('Successfully copied path.', {level: 'success', timeout: 3000});
                 refreshFileList();
             },
             (err) => {
{lfss-0.11.5 → lfss-0.12.0}/lfss/api/__init__.py
@@ -75,7 +75,7 @@ def upload_directory(
         this_count = _counter
         dst_path = f"{path}{os.path.relpath(file_path, directory)}"
         if verbose:
-            print(f"[{this_count}]
+            print(f"[{this_count}] {file_path} -> {dst_path}")
 
         if not (res:=upload_file(
             c, file_path, dst_path,
@@ -178,7 +178,7 @@ def download_directory(
         this_count = _counter
         dst_path = f"{directory}{os.path.relpath(decode_uri_components(src_url), decode_uri_components(src_path))}"
         if verbose:
-            print(f"[{this_count}/{file_count}]
+            print(f"[{this_count}/{file_count}] {src_url} -> {dst_path}")
 
         if not (res:=download_file(
             c, src_url, dst_path,
{lfss-0.11.5 → lfss-0.12.0}/lfss/api/connector.py
@@ -1,14 +1,15 @@
 from __future__ import annotations
 from typing import Optional, Literal
 from collections.abc import Iterator
-import os
+import os
 import requests
 import requests.adapters
 import urllib.parse
 from tempfile import SpooledTemporaryFile
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from lfss.eng.error import PathNotFoundError
 from lfss.eng.datatype import (
-    FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents,
+    FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents, AccessLevel,
     FileSortKey, DirSortKey
 )
 from lfss.eng.utils import ensure_uri_components
@@ -17,6 +18,13 @@ _default_endpoint = os.environ.get('LFSS_ENDPOINT', 'http://localhost:8000')
 _default_token = os.environ.get('LFSS_TOKEN', '')
 num_t = float | int
 
+def _p(x: str) -> str:
+    if x == '/':
+        return x
+    if x.startswith('/'):
+        x = x[1:]
+    return x
+
 class Connector:
     class Session:
         def __init__(
@@ -99,6 +107,7 @@ class Connector:
     def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a file to the specified path."""
         assert isinstance(file_data, bytes), "file_data must be bytes"
+        path = _p(path)
 
         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':
@@ -123,6 +132,7 @@ class Connector:
         using the POST method, with form-data/multipart.
         file can be a path to a file on disk, or bytes.
         """
+        path = _p(path)
 
         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':
@@ -155,6 +165,7 @@ class Connector:
         """Uploads a JSON file to the specified path."""
         assert path.endswith('.json'), "Path must end with .json"
         assert isinstance(data, dict), "data must be a dict"
+        path = _p(path)
 
         # Skip ahead by checking if the file already exists
         if conflict == 'skip-ahead':
@@ -184,6 +195,7 @@ class Connector:
 
     def get(self, path: str) -> Optional[bytes]:
         """Downloads a file from the specified path."""
+        path = _p(path)
         response = self._get(path)
         if response is None: return None
         return response.content
@@ -193,6 +205,7 @@ class Connector:
         Downloads a partial file from the specified path.
         start and end are the byte offsets, both inclusive.
         """
+        path = _p(path)
         response = self._fetch_factory('GET', path, extra_headers={
             'Range': f"bytes={range_start if range_start >= 0 else ''}-{range_end if range_end >= 0 else ''}"
         })()
@@ -201,11 +214,13 @@ class Connector:
 
     def get_stream(self, path: str) -> Iterator[bytes]:
         """Downloads a file from the specified path, will raise PathNotFoundError if path not found."""
+        path = _p(path)
         response = self._get(path, stream=True)
         if response is None: raise PathNotFoundError("Path not found: " + path)
         return response.iter_content(chunk_size=1024)
 
     def get_json(self, path: str) -> Optional[dict]:
+        path = _p(path)
         response = self._get(path)
         if response is None: return None
         assert response.headers['Content-Type'] == 'application/json'
@@ -218,16 +233,18 @@ class Connector:
         """
         response = self._fetch_factory(
             'GET', '_api/get-multiple',
-            {'path': paths, "skip_content": skip_content}
+            {'path': [_p(p) for p in paths], "skip_content": skip_content}
         )()
         return response.json()
 
     def delete(self, path: str):
         """Deletes the file at the specified path."""
+        path = _p(path)
         self._fetch_factory('DELETE', path)()
 
     def get_meta(self, path: str) -> Optional[FileRecord | DirectoryRecord]:
         """Gets the metadata for the file at the specified path."""
+        path = _p(path)
         try:
             response = self._fetch_factory('GET', '_api/meta', {'path': path})()
             if path.endswith('/'):
@@ -242,19 +259,9 @@ class Connector:
     def get_fmeta(self, path: str) -> Optional[FileRecord]: assert (f:=self.get_meta(path)) is None or isinstance(f, FileRecord); return f
     def get_dmeta(self, path: str) -> Optional[DirectoryRecord]: assert (d:=self.get_meta(path)) is None or isinstance(d, DirectoryRecord); return d
 
-    def list_path(self, path: str) -> PathContents:
-        """
-        shorthand list with limited options,
-        for large directories / more options, use list_files and list_dirs instead.
-        """
-        assert path.endswith('/')
-        response = self._fetch_factory('GET', path)()
-        dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
-        files = [FileRecord(**f) for f in response.json()['files']]
-        return PathContents(dirs=dirs, files=files)
-
     def count_files(self, path: str, flat: bool = False) -> int:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/count-files', {'path': path, 'flat': flat})()
         return response.json()['count']
 
@@ -264,6 +271,7 @@ class Connector:
         flat: bool = False
     ) -> list[FileRecord]:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', "_api/list-files", {
             'path': path,
             'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'flat': flat
@@ -272,6 +280,7 @@ class Connector:
 
     def count_dirs(self, path: str) -> int:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/count-dirs', {'path': path})()
         return response.json()['count']
 
@@ -281,32 +290,97 @@ class Connector:
         skim: bool = True
     ) -> list[DirectoryRecord]:
         assert path.endswith('/')
+        path = _p(path)
         response = self._fetch_factory('GET', "_api/list-dirs", {
             'path': path,
             'offset': offset, 'limit': limit, 'order_by': order_by, 'order_desc': order_desc, 'skim': skim
         })()
         return [DirectoryRecord(**d) for d in response.json()]
+
+    def list_path(
+        self, path: str, offset: int = 0, limit: int = 1000,
+        order_by: FileSortKey = '', order_desc: bool = False,
+        _workers: int = 2
+    ) -> PathContents:
+        """ Aggregately lists both files and directories under the given path. """
+        assert path.endswith('/')
+        path = _p(path)
+        if path == '/':
+            # handle root path separately
+            # TODO: change later
+            response = self._fetch_factory('GET', path)()
+            dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
+            files = [FileRecord(**f) for f in response.json()['files']]
+            return PathContents(dirs=dirs, files=files)
+
+        dirs: list[DirectoryRecord] = []
+        files: list[FileRecord] = []
+        with ThreadPoolExecutor(max_workers=_workers) as executor:
+            count_futures = {
+                executor.submit(self.count_dirs, path): 'dirs',
+                executor.submit(self.count_files, path, flat=False): 'files'
+            }
+            dir_count = 0
+            file_count = 0
+            for future in as_completed(count_futures):
+                if count_futures[future] == 'dirs':
+                    dir_count = future.result()
+                else:
+                    file_count = future.result()
+            dir_offset = offset
+            dir_limit = min(limit, max(0, dir_count - dir_offset))
+            file_offset = max(0, offset - dir_count)
+            file_limit = min(limit - dir_limit, max(0, file_count - file_offset))
+
+            dir_order_by = 'dirname' if order_by == 'url' else ''
+            file_order_by = order_by
+
+            def fetch_dirs():
+                nonlocal dirs
+                if dir_limit > 0:
+                    dirs = self.list_dirs(
+                        path, offset=dir_offset, limit=dir_limit,
+                        order_by=dir_order_by, order_desc=order_desc
+                    )
+            def fetch_files():
+                nonlocal files
+                if file_limit > 0:
+                    files = self.list_files(
+                        path, offset=file_offset, limit=file_limit,
+                        order_by=file_order_by, order_desc=order_desc, flat=False
+                    )
+            futures = [
+                executor.submit(fetch_dirs),
+                executor.submit(fetch_files)
+            ]
+            for future in as_completed(futures):
+                future.result()
+        return PathContents(dirs=dirs, files=files)
 
     def set_file_permission(self, path: str, permission: int | FileReadPermission):
         """Sets the file permission for the specified path."""
+        path = _p(path)
         self._fetch_factory('POST', '_api/meta', {'path': path, 'perm': int(permission)})(
             headers={'Content-Type': 'application/www-form-urlencoded'}
         )
 
     def move(self, path: str, new_path: str):
         """Move file or directory to a new path."""
+        path = _p(path); new_path = _p(new_path)
         self._fetch_factory('POST', '_api/meta', {'path': path, 'new_path': new_path})(
             headers = {'Content-Type': 'application/www-form-urlencoded'}
         )
 
     def copy(self, src: str, dst: str):
         """Copy file from src to dst."""
+        src = _p(src); dst = _p(dst)
        self._fetch_factory('POST', '_api/copy', {'src': src, 'dst': dst})(
             headers = {'Content-Type': 'application/www-form-urlencoded'}
         )
 
     def bundle(self, path: str) -> Iterator[bytes]:
         """Bundle a path into a zip file."""
+        path = _p(path)
         response = self._fetch_factory('GET', '_api/bundle', {'path': path})(
             headers = {'Content-Type': 'application/www-form-urlencoded'},
             stream = True
@@ -316,4 +390,10 @@ class Connector:
     def whoami(self) -> UserRecord:
         """Gets information about the current user."""
         response = self._fetch_factory('GET', '_api/whoami')()
-        return UserRecord(**response.json())
+        return UserRecord(**response.json())
+
+    def list_peers(self, level: AccessLevel = AccessLevel.READ, incoming: bool = False) -> list[UserRecord]:
+        """List all users that have at least the given access level to the current user."""
+        response = self._fetch_factory('GET', '_api/list-peers', {'level': int(level), 'incoming': incoming})()
+        users = [UserRecord(**u) for u in response.json()]
+        return users
{lfss-0.11.5 → lfss-0.12.0}/lfss/cli/__init__.py
@@ -1,12 +1,16 @@
 from contextlib import contextmanager
-from typing import Iterable, TypeVar, Generator
+from typing import Iterable, TypeVar, Generator, Callable, Optional
 import requests, os
 
 @contextmanager
-def catch_request_error():
+def catch_request_error(error_code_handler: Optional[ dict[int, Callable[[requests.Response], None]] ] = None):
     try:
         yield
     except requests.RequestException as e:
+        if error_code_handler is not None:
+            if e.response is not None and e.response.status_code in error_code_handler:
+                error_code_handler[e.response.status_code](e.response)
+                return
         print(f"\033[31m[Request error]: {e}\033[0m")
         if e.response is not None:
             print(f"\033[91m[Error message]: {e.response.text}\033[0m")
@@ -15,13 +19,13 @@ T = TypeVar('T')
 def line_sep(iter: Iterable[T], enable=True, start=True, end=True, color="\033[90m") -> Generator[T, None, None]:
     screen_width = os.get_terminal_size().columns
     def print_ln():
-        print(color + "-" * screen_width + "\033[0m")
+        if enable: print(color + "-" * screen_width + "\033[0m")
 
-    if start
+    if start:
         print_ln()
     for i, line in enumerate(iter):
-        if
+        if i > 0:
             print_ln()
         yield line
-    if end
+    if end:
         print_ln()
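With the new `error_code_handler` parameter, callers can map specific HTTP status codes to their own messages while anything else keeps the default colored output. A small sketch, assuming a server is running at the client's default endpoint `http://localhost:8000`:

```python
import sys
import requests
from lfss.cli import catch_request_error

# A 404 gets a custom message; other request errors fall through to the
# generic "[Request error]" printout inside catch_request_error.
handlers = {404: lambda resp: print("no such path", file=sys.stderr)}

with catch_request_error(handlers):
    resp = requests.get("http://localhost:8000/does-not-exist")
    resp.raise_for_status()  # raises requests.HTTPError, caught by the context manager
```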
{lfss-0.11.5 → lfss-0.12.0}/lfss/cli/cli.py
@@ -1,26 +1,49 @@
 from pathlib import Path
 import argparse, typing, sys
 from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
-from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
-from lfss.eng.utils import decode_uri_components
-from . import catch_request_error, line_sep
+from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey, AccessLevel
+from lfss.eng.utils import decode_uri_components, fmt_storage_size
+from . import catch_request_error, line_sep as _line_sep
+
+# monkey patch to avoid printing line separators...may remove line_sep in the future
+line_sep = lambda *args, **kwargs: _line_sep(*args, enable=False, **kwargs)
 
 def parse_permission(s: str) -> FileReadPermission:
     for p in FileReadPermission:
         if p.name.lower() == s.lower():
             return p
     raise ValueError(f"Invalid permission {s}")
+def parse_access_level(s: str) -> AccessLevel:
+    for p in AccessLevel:
+        if p.name.lower() == s.lower():
+            return p
+    raise ValueError(f"Invalid access level {s}")
+def default_error_handler_dict(path: str):
+    return {
+        401: lambda _: print(f"\033[31mUnauthorized\033[0m ({path})", file=sys.stderr),
+        403: lambda _: print(f"\033[31mForbidden\033[0m ({path})", file=sys.stderr),
+        404: lambda _: print(f"\033[31mNot found\033[0m ({path})", file=sys.stderr),
+        409: lambda _: print(f"\033[31mConflict\033[0m ({path})", file=sys.stderr),
+    }
 
 def parse_arguments():
     parser = argparse.ArgumentParser(description="Client-side command line interface, set LFSS_ENDPOINT and LFSS_TOKEN environment variables for authentication.")
 
     sp = parser.add_subparsers(dest="command", required=True)
 
+    # whoami
+    sp_whoami = sp.add_parser("whoami", help="Show current user information")
+
+    # list peers
+    sp_peers = sp.add_parser("peers", help="Query users that you have access to or users that have access to you")
+    sp_peers.add_argument('-l', "--level", type=parse_access_level, default=AccessLevel.READ, help="Access level filter")
+    sp_peers.add_argument('-i', '--incoming', action='store_true', help="List users that have access to you (rather than you have access to them")
+
     # upload
-    sp_upload = sp.add_parser("upload", help="Upload
+    sp_upload = sp.add_parser("upload", help="Upload a file or directory", aliases=["up"])
     sp_upload.add_argument("src", help="Source file or directory", type=str)
     sp_upload.add_argument("dst", help="Destination url path", type=str)
-    sp_upload.add_argument("-
+    sp_upload.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
     sp_upload.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent uploads")
     sp_upload.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory upload")
     sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip", "skip-ahead"], default="abort", help="Conflict resolution")
@@ -28,21 +51,35 @@ def parse_arguments():
     sp_upload.add_argument("--retries", type=int, default=0, help="Number of retries")
 
     # download
-    sp_download = sp.add_parser("download", help="Download
+    sp_download = sp.add_parser("download", help="Download a file or directory", aliases=["down"])
     sp_download.add_argument("src", help="Source url path", type=str)
     sp_download.add_argument("dst", help="Destination file or directory", type=str)
-    sp_download.add_argument("-
+    sp_download.add_argument("-q", "--quiet", action="store_true", help="Quiet output, no progress info")
     sp_download.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent downloads")
     sp_download.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory download")
-    sp_download.add_argument("--overwrite",
+    sp_download.add_argument("--conflict", choices=["overwrite", "skip"], default="abort", help="Conflict resolution, only works with file download")
     sp_download.add_argument("--retries", type=int, default=0, help="Number of retries")
 
     # query
-    sp_query = sp.add_parser("
-    sp_query.add_argument("path", help="Path to query", nargs="
+    sp_query = sp.add_parser("info", help="Query file or directories metadata from the server", aliases=["i"])
+    sp_query.add_argument("path", help="Path to query", nargs="+", type=str)
+
+    # delete
+    sp_delete = sp.add_parser("delete", help="Delete files or directories", aliases=["del"])
+    sp_delete.add_argument("path", help="Path to delete", nargs="+", type=str)
+    sp_delete.add_argument("-y", "--yes", action="store_true", help="Confirm deletion without prompt")
+
+    # aggregate list
+    sp_list = sp.add_parser("list", help="Aggregately list files and directories of a given path", aliases=["ls"])
+    sp_list.add_argument("path", help="Path to list", type=str)
+    sp_list.add_argument("--offset", type=int, default=0, help="Offset of the list")
+    sp_list.add_argument("--limit", type=int, default=100, help="Limit of the list")
+    sp_list.add_argument("-l", "--long", action="store_true", help="Detailed list, including all metadata")
+    sp_list.add_argument("--order", "--order-by", type=str, help="Order of the list", default="", choices=typing.get_args(FileSortKey))
+    sp_list.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")
 
     # list directories
-    sp_list_d = sp.add_parser("list-
+    sp_list_d = sp.add_parser("list-d", help="List directories of a given path", aliases=["lsd"])
     sp_list_d.add_argument("path", help="Path to list", type=str)
     sp_list_d.add_argument("--offset", type=int, default=0, help="Offset of the list")
     sp_list_d.add_argument("--limit", type=int, default=100, help="Limit of the list")
@@ -51,7 +88,7 @@ def parse_arguments():
     sp_list_d.add_argument("--reverse", "--order-desc", action="store_true", help="Reverse the list order")
 
     # list files
-    sp_list_f = sp.add_parser("list-
+    sp_list_f = sp.add_parser("list-f", help="List files of a given path", aliases=["lsf"])
     sp_list_f.add_argument("path", help="Path to list", type=str)
     sp_list_f.add_argument("--offset", type=int, default=0, help="Offset of the list")
     sp_list_f.add_argument("--limit", type=int, default=100, help="Limit of the list")
@@ -65,12 +102,33 @@ def main():
 def main():
     args = parse_arguments()
     connector = Connector()
-    if args.command == "
+    if args.command == "whoami":
+        with catch_request_error():
+            user = connector.whoami()
+            print("Username:", user.username)
+            print("User ID:", user.id)
+            print("Is Admin:", bool(user.is_admin))
+            print("Max Storage:", fmt_storage_size(user.max_storage))
+            print("Default Permission:", user.permission.name)
+            print("Created At:", user.create_time)
+            print("Last Active:", user.last_active)
+
+    elif args.command == "peers":
+        with catch_request_error():
+            users = connector.list_peers(level=args.level, incoming=args.incoming)
+            if not args.incoming:
+                print(f"Peers that you have {args.level.name} access to:")
+            else:
+                print(f"Peers that have {args.level.name} access to you:")
+            for i, u in enumerate(line_sep(users)):
+                print(f"[{i+1}] {u.username} (id={u.id})")
+
+    elif args.command in ["upload", "up"]:
         src_path = Path(args.src)
         if src_path.is_dir():
             failed_upload = upload_directory(
                 connector, args.src, args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_concurrent=args.jobs,
                 n_retries=args.retries,
                 interval=args.interval,
@@ -86,7 +144,7 @@ def main():
                 connector,
                 file_path = args.src,
                 dst_url = args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_retries=args.retries,
                 interval=args.interval,
                 conflict=args.conflict,
@@ -95,16 +153,16 @@ def main():
         if not success:
             print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)
 
-    elif args.command
+    elif args.command in ["download", "down"]:
         is_dir = args.src.endswith("/")
         if is_dir:
             failed_download = download_directory(
                 connector, args.src, args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_concurrent=args.jobs,
                 n_retries=args.retries,
                 interval=args.interval,
-                overwrite=args.overwrite
+                overwrite=args.conflict == "overwrite"
             )
             if failed_download:
                 print("\033[91mFailed to download:\033[0m", file=sys.stderr)
@@ -115,7 +173,7 @@ def main():
                 connector,
                 src_url = args.src,
                 file_path = args.dst,
-                verbose=args.
+                verbose=not args.quiet,
                 n_retries=args.retries,
                 interval=args.interval,
                 overwrite=args.overwrite
@@ -123,17 +181,50 @@ def main():
         if not success:
             print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)
 
-    elif args.command
+    elif args.command in ["delete", "del"]:
+        if not args.yes:
+            print("You are about to delete the following paths:")
+            for path in args.path:
+                print("[D]" if path.endswith("/") else "[F]", path)
+            confirm = input("Are you sure? ([yes]/no): ")
+            if confirm.lower() not in ["", "y", "yes"]:
+                print("Aborted.")
+                exit(0)
+        for path in args.path:
+            with catch_request_error(default_error_handler_dict(path)):
+                connector.delete(path)
+                print(f"\033[32mDeleted\033[0m ({path})")
+
+    elif args.command in ["info", "i"]:
         for path in args.path:
-            with catch_request_error():
+            with catch_request_error(default_error_handler_dict(path)):
                 res = connector.get_meta(path)
                 if res is None:
                     print(f"\033[31mNot found\033[0m ({path})")
                 else:
                     print(res)
 
-    elif args.command
-        with catch_request_error():
+    elif args.command in ["ls", "list"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
+            res = connector.list_path(
+                args.path,
+                offset=args.offset,
+                limit=args.limit,
+                order_by=args.order,
+                order_desc=args.reverse,
+            )
+            for i, d in enumerate(line_sep(res.dirs, end=False)):
+                d.url = decode_uri_components(d.url)
+                print(f"[d{i+1}] {d if args.long else d.url}")
+            for i, f in enumerate(line_sep(res.files)):
+                f.url = decode_uri_components(f.url)
+                print(f"[f{i+1}] {f if args.long else f.url}")
+
+            if len(res.dirs) + len(res.files) == args.limit:
+                print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more items.\033[0m")
+
+    elif args.command in ["lsf", "list-f"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
             res = connector.list_files(
                 args.path,
                 offset=args.offset,
@@ -147,10 +238,10 @@ def main():
             print(f"[{i+1}] {f if args.long else f.url}")
 
         if len(res) == args.limit:
-            print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files
+            print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more files.\033[0m")
 
-    elif args.command
-        with catch_request_error():
+    elif args.command in ["lsd", "list-d"]:
+        with catch_request_error(default_error_handler_dict(args.path)):
             res = connector.list_dirs(
                 args.path,
                 offset=args.offset,
@@ -164,7 +255,7 @@ def main():
             print(f"[{i+1}] {d if args.long else d.url}")
 
         if len(res) == args.limit:
-            print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories
+            print(f"\033[33m[Warning] List limit reached, use --offset and --limit to list more directories.\033[0m")
 
     else:
         raise NotImplementedError(f"Command {args.command} not implemented.")
{lfss-0.11.5 → lfss-0.12.0}/lfss/cli/user.py
@@ -1,4 +1,4 @@
-import argparse, asyncio, os
+import argparse, asyncio, os, secrets
 from contextlib import asynccontextmanager
 from .cli import parse_permission, FileReadPermission
 from ..eng.utils import parse_storage_size, fmt_storage_size
@@ -18,10 +18,10 @@ async def _main():
     sp = parser.add_subparsers(dest='subparser_name', required=True)
     sp_add = sp.add_parser('add')
     sp_add.add_argument('username', type=str)
-    sp_add.add_argument('password', type=str)
+    sp_add.add_argument('password', nargs='?', type=str, default=None)
     sp_add.add_argument('--admin', action='store_true', help='Set user as admin')
     sp_add.add_argument("--permission", type=parse_permission, default=FileReadPermission.UNSET, help="File permission, can be public, protected, private, or unset")
-    sp_add.add_argument('--max-storage', type=parse_storage_size, default="
+    sp_add.add_argument('--max-storage', type=parse_storage_size, default="10G", help="Maximum storage size, e.g. 1G, 100M, 10K, default is 10G")
 
     sp_delete = sp.add_parser('delete')
     sp_delete.add_argument('username', type=str)
@@ -58,6 +58,9 @@ async def _main():
 
     if args.subparser_name == 'add':
         async with get_uconn() as uconn:
+            if args.password is None:
+                passwd = secrets.token_urlsafe(16)
+                args.password = passwd
             await uconn.create_user(args.username, args.password, args.admin, max_storage=args.max_storage, permission=args.permission)
             user = await uconn.get_user(args.username)
             assert user is not None
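The `add` subcommand's password argument is now optional; when omitted, the password is generated with the standard library, roughly as below (a standalone illustration of the generation step only, not the full CLI flow):

```python
import secrets

# 16 random bytes, URL-safe base64 encoded (~22 characters)
password = secrets.token_urlsafe(16)
print(len(password), password)
```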
{lfss-0.11.5 → lfss-0.12.0}/lfss/eng/database.py
@@ -163,25 +163,31 @@ class UserConn(DBObjectBase):
             return AccessLevel.NONE
         return AccessLevel(res[0])
 
-    async def list_peer_users(self,
+    async def list_peer_users(self, user: int | str, level: AccessLevel, incoming = False) -> list[UserRecord]:
         """
-
-
+        if not incoming:
+            List all users that user can do [AliasLevel] to, with level >= level,
+        else:
+            List all users that can do [AliasLevel] to user, with level >= level
+        Note: the returned list does not include user and is not apporiate for admin (who has all permissions for all users)
         """
         assert int(level) > AccessLevel.NONE, f"Invalid level, {level}"
-
+        aim_field = 'src_user_id' if incoming else 'dst_user_id'
+        query_field = 'dst_user_id' if incoming else 'src_user_id'
+
+        match user:
             case int():
-                await self.cur.execute("""
+                await self.cur.execute(f"""
                     SELECT * FROM user WHERE id IN (
-                        SELECT
+                        SELECT {aim_field} FROM upeer WHERE {query_field} = ? AND access_level >= ?
                     )
-                    """, (
+                    """, (user, int(level)))
             case str():
-                await self.cur.execute("""
+                await self.cur.execute(f"""
                     SELECT * FROM user WHERE id IN (
-                        SELECT
+                        SELECT {aim_field} FROM upeer WHERE {query_field} = (SELECT id FROM user WHERE username = ?) AND access_level >= ?
                     )
-                    """, (
+                    """, (user, int(level)))
             case _:
                 raise ValueError("Invalid arguments")
         res = await self.cur.fetchall()
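The `incoming` flag only swaps which side of the `upeer` relation is matched and which side is returned; restated on its own for readability (column names are exactly those used in the hunk above):

```python
def peer_fields(incoming: bool) -> tuple[str, str]:
    # (column returned from upeer, column matched against the given user)
    aim_field = 'src_user_id' if incoming else 'dst_user_id'
    query_field = 'dst_user_id' if incoming else 'src_user_id'
    return aim_field, query_field

assert peer_fields(False) == ('dst_user_id', 'src_user_id')  # users the given user can access
assert peer_fields(True) == ('src_user_id', 'dst_user_id')   # users who can access the given user
```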
{lfss-0.11.5 → lfss-0.12.0}/lfss/svc/app_native.py
@@ -8,7 +8,7 @@ from fastapi.exceptions import HTTPException
 from ..eng.utils import ensure_uri_components
 from ..eng.config import MAX_MEM_FILE_BYTES
 from ..eng.connection_pool import unique_cursor
-from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access
+from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access, UserConn
 from ..eng.datatype import (
     FileReadPermission, UserRecord, AccessLevel,
     FileSortKey, DirSortKey
@@ -185,6 +185,16 @@ async def copy_file(
 ):
     return await copy_impl(src_path = src, dst_path = dst, op_user = user)
 
+@router_api.get("/list-peers")
+@handle_exception
+async def list_peers(user: UserRecord = Depends(registered_user), level: AccessLevel = AccessLevel.READ, incoming: bool = False):
+    async with unique_cursor() as conn:
+        uconn = UserConn(conn)
+        peer_users = await uconn.list_peer_users(user.id, level, incoming=incoming)
+    for u in peer_users:
+        u.credential = "__HIDDEN__"
+    return peer_users
+
 async def validate_path_read_permission(path: str, user: UserRecord):
     if not path.endswith("/"):
         raise HTTPException(status_code=400, detail="Path must end with /")
{lfss-0.11.5 → lfss-0.12.0}/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "lfss"
-version = "0.11.5"
-description = "
+version = "0.12.0"
+description = "Lite file storage service"
 authors = ["Li, Mengxun <mengxunli@whu.edu.cn>"]
 readme = "Readme.md"
 homepage = "https://github.com/MenxLi/lfss"
lfss-0.11.5/docs/Client.md
DELETED
@@ -1,37 +0,0 @@
-
-# Client-side CLI tools
-
-To install python CLI tools without dependencies (to avoid conflicts with your existing packages):
-```sh
-pip install requests
-pip install lfss --no-deps
-```
-
-Then set the `LFSS_ENDPOINT`, `LFSS_TOKEN` environment variables,
-then you can use the following commands:
-```sh
-# Query a path
-lfss query remote/file[/or_dir/]
-
-# List directories of a specified path
-lfss list-dirs remote/dir/
-
-# List files of a specified path,
-# with pagination and sorting
-lfss list-files --offset 0 --limit 100 --order access_time remote/dir/
-
-# Upload a file
-lfss upload local/file.txt remote/file.txt
-
-# Upload a directory, note the ending slashes
-lfss upload local/dir/ remote/dir/
-
-# Download a file
-lfss download remote/file.txt local/file.txt
-
-# Download a directory, with verbose output and 8 concurrent jobs
-# Overwrite existing files
-lfss download -v -j 8 --conflict overwrite remote/dir/ local/dir/
-```
-
-More commands can be found using `lfss-cli --help`.